Load libraries

library(tidyverse)
## ── Attaching packages ─────────────────────────────────────── tidyverse 1.3.2 ──
## ✔ ggplot2 3.4.0      ✔ purrr   1.0.0 
## ✔ tibble  3.1.8      ✔ dplyr   1.0.10
## ✔ tidyr   1.2.1      ✔ stringr 1.5.0 
## ✔ readr   2.1.3      ✔ forcats 0.5.2 
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ dplyr::filter() masks stats::filter()
## ✖ dplyr::lag()    masks stats::lag()
library(haven)
library(caret)
## Loading required package: lattice
## 
## Attaching package: 'caret'
## 
## The following object is masked from 'package:purrr':
## 
##     lift
library(nnet)
library(broom)
library(pROC)
## Type 'citation("pROC")' for a citation.
## 
## Attaching package: 'pROC'
## 
## The following objects are masked from 'package:stats':
## 
##     cov, smooth, var
library(rpart.plot)
## Loading required package: rpart

Import the processed data

# load the processed state data
df_part1 = read_csv("./data/df_a_m.csv")
## Rows: 775895 Columns: 24
## ── Column specification ────────────────────────────────────────────────────────
## Delimiter: ","
## chr  (1): sitename
## dbl (23): year, age, sex, grade, race4, bmi, qnothhpl, q85, q17, q21, q23, q...
## 
## ℹ Use `spec()` to retrieve the full column specification for this data.
## ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
df_part2 = read_csv("./data/df_n_z.csv")
## Rows: 754495 Columns: 24
## ── Column specification ────────────────────────────────────────────────────────
## Delimiter: ","
## chr  (1): sitename
## dbl (23): year, age, sex, grade, race4, bmi, qnothhpl, q85, q17, q21, q23, q...
## 
## ℹ Use `spec()` to retrieve the full column specification for this data.
## ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
# merge the two state datasets
df_f_project = bind_rows(df_part1, df_part2)
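# a quick sanity check (a hedged sketch, not part of the original analysis):
# the two state files should share the same column layout before stacking
stopifnot(identical(names(df_part1), names(df_part2)))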

# load the district dataset
df_district = read_csv("./data/df_district.csv")
## Rows: 504249 Columns: 24
## ── Column specification ────────────────────────────────────────────────────────
## Delimiter: ","
## chr  (1): sitename
## dbl (23): year, age, sex, grade, race4, bmi, qnothhpl, q85, q17, q21, q23, q...
## 
## ℹ Use `spec()` to retrieve the full column specification for this data.
## ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.

Data cleaning

# the state data
df_fit_state <- 
  df_f_project %>% 
  filter(year == 2019) %>% 
  mutate(
  across(c("sex", "age", "grade", "race4", "qnothhpl"), as.factor),
  year = as.character(year)
  ) %>% 
  mutate(across(starts_with("q"), as.factor)) %>% 
  filter(q85 %in% c(1, 2, 3)) %>% 
  na.omit()

# the school district data
df_fit_district <- 
  df_district %>% 
  filter(year == 2019) %>%
    mutate(
  across(c("sex", "age", "grade", "race4", "qnothhpl"), as.factor),
  year = as.character(year)
  ) %>% 
  mutate(across(starts_with("q"), as.factor)) %>%
  filter(q85 %in% c(1, 2, 3)) %>% 
  na.omit()

df_fit_state %>% 
  group_by(sitename) %>% 
  summarize(
    n_obs = n()
  )
## # A tibble: 12 × 2
##    sitename            n_obs
##    <chr>               <int>
##  1 Alabama (AL)          308
##  2 Arkansas (AR)         304
##  3 Illinois (IL)         398
##  4 Iowa (IA)             235
##  5 Kentucky (KY)         313
##  6 Michigan (MI)         582
##  7 Mississippi (MS)      265
##  8 Nebraska (NE)         201
##  9 Oklahoma (OK)         348
## 10 Pennsylvania (PA)     440
## 11 South Carolina (SC)   126
## 12 West Virginia (WV)    284
df_fit_district %>% 
  group_by(sitename) %>% 
  summarize(
    n_obs = n()
  )
## # A tibble: 15 × 2
##    sitename                     n_obs
##    <chr>                        <int>
##  1 Broward County, FL (FT)        168
##  2 Chicago, IL (CH)               157
##  3 Eaton Consortium, MI (EA)      129
##  4 Fort Worth, TX (FW)            237
##  5 Genesee Consortium, MI (GE)    166
##  6 Hillsborough County, FL (HL)   248
##  7 Los Angeles, CA (LO)           109
##  8 Newark, NJ (NW)                142
##  9 Orange County, FL (OL)         123
## 10 Palm Beach County, FL (PB)     305
## 11 Pasco County, FL (PS)          200
## 12 Philadelphia, PA (PH)          172
## 13 Portland, OR (PO)              213
## 14 San Francisco, CA (SF)         203
## 15 Shelby County, TN (ST)         160
summary(df_fit_district)
##    sitename             year           age     sex      grade    race4   
##  Length:2732        Length:2732        1:  0   1:1485   1: 276   1: 818  
##  Class :character   Class :character   2:  0   2:1247   2: 610   2: 581  
##  Mode  :character   Mode  :character   3:111            3: 825   3:1036  
##                                        4:421            4:1021   4: 297  
##                                        5:730                             
##                                        6:946                             
##                                        7:524                             
##       bmi        qnothhpl q85           q17       q21      q23      q26     
##  Min.   :13.58   1: 735   1: 715   1      :1912   1: 240   1: 414   1: 651  
##  1st Qu.:20.38   2:1997   2:1849   2      : 345   2:2299   2:2318   2:2081  
##  Median :22.49            3: 168   3      : 296   3:  91                    
##  Mean   :23.64                     4      :  89   4:  55                    
##  3rd Qu.:25.72                     8      :  39   5:  17                    
##  Max.   :58.89                     5      :  28   6:  30                    
##                                    (Other):  23                             
##  q30      q41      q47      q49      q50      q52      q53      q63     
##  1: 779   1:1535   1:1577   1:2197   1:2579   1:2703   1:2690   1:   0  
##  2:1953   2: 593   2: 374   2: 230   2:  81   2:  12   2:  20   2:1512  
##           3: 340   3: 293   3: 136   3:  48   3:   3   3:   5   3:1220  
##           4: 154   4: 162   4:  77   4:   8   4:   6   4:   1           
##           5:  83   5: 144   5:  30   5:   6   5:   1   5:   7           
##           6:  15   6: 182   6:  62   6:  10   6:   7   6:   9           
##           7:  12                                                        
##  q65      q87      q89     
##  1:   0   1: 686   1: 874  
##  2:1237   2:1940   2:1066  
##  3:1226   3: 106   3: 520  
##  4: 269            4: 104  
##                    5:  50  
##                    6:  14  
##                    7: 104
summary(df_fit_state)
##    sitename             year           age      sex      grade    race4   
##  Length:3804        Length:3804        1:   0   1:2028   1: 501   1:2396  
##  Class :character   Class :character   2:   5   2:1776   2: 848   2: 546  
##  Mode  :character   Mode  :character   3: 130            3:1222   3: 550  
##                                        4: 616            4:1233   4: 312  
##                                        5:1012                             
##                                        6:1254                             
##                                        7: 787                             
##       bmi        qnothhpl q85           q17       q21      q23      q26     
##  Min.   :13.25   1:1364   1: 770   1      :2686   1: 303   1: 843   1: 948  
##  1st Qu.:20.62   2:2440   2:2822   2      : 503   2:3238   2:2961   2:2856  
##  Median :22.89            3: 212   3      : 391   3: 112                    
##  Mean   :24.20                     4      :  96   4:  90                    
##  3rd Qu.:26.51                     8      :  60   5:  29                    
##  Max.   :54.40                     5      :  38   6:  32                    
##                                    (Other):  30                             
##  q30      q41      q47      q49      q50      q52      q53      q63     
##  1:1636   1:2031   1:2473   1:3038   1:3608   1:3760   1:3737   1:   0  
##  2:2168   2: 873   2: 427   2: 323   2: 101   2:  22   2:  27   2:2001  
##           3: 445   3: 313   3: 196   3:  44   3:   3   3:  10   3:1803  
##           4: 248   4: 193   4:  97   4:  24   4:   2   4:   7           
##           5: 150   5: 145   5:  69   5:   6   5:   2   5:   3           
##           6:  30   6: 253   6:  81   6:  21   6:  15   6:  20           
##           7:  27                                                        
##  q65      q87      q89     
##  1:   0   1: 946   1:1430  
##  2:1744   2:2724   2:1396  
##  3:1765   3: 134   3: 668  
##  4: 295            4: 147  
##                    5:  50  
##                    6:  12  
##                    7: 101

Check variables with zero-variance levels and recode the unbalanced variables

# modifications for the school district data
levels(df_fit_district$age)
## [1] "1" "2" "3" "4" "5" "6" "7"
levels(df_fit_district$q63)
## [1] "1" "2" "3"
levels(df_fit_district$q65)
## [1] "1" "2" "3" "4"
# drop the levels with zero observations
df_fit_district = df_fit_district %>% 
  # filter out the levels with no observations
  filter(!(age %in% c("1", "2"))) %>% 
  filter(q63 != 1) %>% 
  filter(q65 != 1) %>% 
  mutate(
  # re-level the factors to drop the now-empty levels
     age = factor(age),
     q63 = factor(q63),
     q65 = factor(q65),
  # convert q50, q52, q53 to two-level variables:
  # 1 = 1, and 2 = all other responses
     q50 = ifelse(q50 == 1, 1, 2),
     q50 = factor(q50),
     q52 = ifelse(q52 == 1, 1, 2),
     q52 = factor(q52),
     q53 = ifelse(q53 == 1, 1, 2),
     q53 = factor(q53)
  ) 

summary(df_fit_district)
##    sitename             year           age     sex      grade    race4   
##  Length:2732        Length:2732        3:111   1:1485   1: 276   1: 818  
##  Class :character   Class :character   4:421   2:1247   2: 610   2: 581  
##  Mode  :character   Mode  :character   5:730            3: 825   3:1036  
##                                        6:946            4:1021   4: 297  
##                                        7:524                             
##                                                                          
##                                                                          
##       bmi        qnothhpl q85           q17       q21      q23      q26     
##  Min.   :13.58   1: 735   1: 715   1      :1912   1: 240   1: 414   1: 651  
##  1st Qu.:20.38   2:1997   2:1849   2      : 345   2:2299   2:2318   2:2081  
##  Median :22.49            3: 168   3      : 296   3:  91                    
##  Mean   :23.64                     4      :  89   4:  55                    
##  3rd Qu.:25.72                     8      :  39   5:  17                    
##  Max.   :58.89                     5      :  28   6:  30                    
##                                    (Other):  23                             
##  q30      q41      q47      q49      q50      q52      q53      q63     
##  1: 779   1:1535   1:1577   1:2197   1:2579   1:2703   1:2690   2:1512  
##  2:1953   2: 593   2: 374   2: 230   2: 153   2:  29   2:  42   3:1220  
##           3: 340   3: 293   3: 136                                      
##           4: 154   4: 162   4:  77                                      
##           5:  83   5: 144   5:  30                                      
##           6:  15   6: 182   6:  62                                      
##           7:  12                                                        
##  q65      q87      q89     
##  2:1237   1: 686   1: 874  
##  3:1226   2:1940   2:1066  
##  4: 269   3: 106   3: 520  
##                    4: 104  
##                    5:  50  
##                    6:  14  
##                    7: 104
# modifications for the state data
levels(df_fit_state$age)
## [1] "1" "2" "3" "4" "5" "6" "7"
levels(df_fit_state$q63)
## [1] "1" "2" "3"
levels(df_fit_state$q65)
## [1] "1" "2" "3" "4"
# drop the levels with zero observations
df_fit_state = df_fit_state %>% 
  # filter out the levels with no observations
  filter(age != 1) %>% 
  filter(q63 != 1) %>% 
  filter(q65 != 1) %>% 
  mutate(
  # re-level the factors to drop the now-empty levels
     age = factor(age),
     q63 = factor(q63),
     q65 = factor(q65),
  # convert q50, q52, q53 to two-level variables:
  # 1 = 1, and 2 = all other responses
     q50 = ifelse(q50 == 1, 1, 2),
     q50 = factor(q50),
     q52 = ifelse(q52 == 1, 1, 2),
     q52 = factor(q52),
     q53 = ifelse(q53 == 1, 1, 2),
     q53 = factor(q53)
  ) 

summary(df_fit_state)
##    sitename             year           age      sex      grade    race4   
##  Length:3804        Length:3804        2:   5   1:2028   1: 501   1:2396  
##  Class :character   Class :character   3: 130   2:1776   2: 848   2: 546  
##  Mode  :character   Mode  :character   4: 616            3:1222   3: 550  
##                                        5:1012            4:1233   4: 312  
##                                        6:1254                             
##                                        7: 787                             
##                                                                           
##       bmi        qnothhpl q85           q17       q21      q23      q26     
##  Min.   :13.25   1:1364   1: 770   1      :2686   1: 303   1: 843   1: 948  
##  1st Qu.:20.62   2:2440   2:2822   2      : 503   2:3238   2:2961   2:2856  
##  Median :22.89            3: 212   3      : 391   3: 112                    
##  Mean   :24.20                     4      :  96   4:  90                    
##  3rd Qu.:26.51                     8      :  60   5:  29                    
##  Max.   :54.40                     5      :  38   6:  32                    
##                                    (Other):  30                             
##  q30      q41      q47      q49      q50      q52      q53      q63     
##  1:1636   1:2031   1:2473   1:3038   1:3608   1:3760   1:3737   2:2001  
##  2:2168   2: 873   2: 427   2: 323   2: 196   2:  44   2:  67   3:1803  
##           3: 445   3: 313   3: 196                                      
##           4: 248   4: 193   4:  97                                      
##           5: 150   5: 145   5:  69                                      
##           6:  30   6: 253   6:  81                                      
##           7:  27                                                        
##  q65      q87      q89     
##  2:1744   1: 946   1:1430  
##  3:1765   2:2724   2:1396  
##  4: 295   3: 134   3: 668  
##                    4: 147  
##                    5:  50  
##                    6:  12  
##                    7: 101
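
Beyond inspecting levels() by hand, caret can flag degenerate predictors automatically. A minimal sketch of that optional check, assuming the cleaned data frames above:

# list predictors with zero or near-zero variance after cleaning
nzv_metrics <- nearZeroVar(df_fit_state, saveMetrics = TRUE)
nzv_metrics[nzv_metrics$zeroVar | nzv_metrics$nzv, ]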

Variable Importance for every State
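
The same ridge pipeline is repeated for every site below. A small wrapper could capture the shared steps; this is a sketch of one possible refactor (fit_site_ridge is a hypothetical helper, not part of the original code):

fit_site_ridge <- function(df, site, lambda = 10^seq(-3, 3, length = 100)) {
  # subset to one site and drop the identifier columns
  df_site <- df %>% 
    filter(sitename == site) %>% 
    select(-sitename, -year)
  # 10-fold CV over the ridge penalty (alpha = 0 in glmnet)
  train(q85 ~ ., df_site,
        method = "glmnet",
        trControl = trainControl(method = "cv", number = 10),
        tuneGrid = expand.grid(alpha = 0, lambda = lambda))
}
# e.g. model_al <- fit_site_ridge(df_fit_state, "Alabama (AL)")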

Alabama (AL)

df_al = df_fit_state %>% 
  filter(sitename == "Alabama (AL)") %>% 
  select(-sitename, -year)


set.seed(123)
# Create a grid of lambda values to search
lambda <- 10^seq(-3,3, length = 100)

# Specify training control
train_control_ridge <- trainControl(method = "cv", number = 10)

# fit a multinomial ridge model: alpha = 0 selects the pure L2 penalty in glmnet
model_ridge = train(q85 ~.,
                    df_al, 
                    method = "glmnet", 
                    trControl = train_control_ridge, 
                    tuneGrid = expand.grid(alpha = 0, lambda = lambda))

summary(model_ridge)
##             Length Class      Mode     
## a0          300    -none-     numeric  
## beta          3    -none-     list     
## dfmat       300    -none-     numeric  
## df          100    -none-     numeric  
## dim           2    -none-     numeric  
## lambda      100    -none-     numeric  
## dev.ratio   100    -none-     numeric  
## nulldev       1    -none-     numeric  
## npasses       1    -none-     numeric  
## jerr          1    -none-     numeric  
## offset        1    -none-     logical  
## classnames    3    -none-     character
## grouped       1    -none-     logical  
## call          5    -none-     call     
## nobs          1    -none-     numeric  
## lambdaOpt     1    -none-     numeric  
## xNames       59    -none-     character
## problemType   1    -none-     character
## tuneValue     2    data.frame list     
## obsLevels     3    -none-     character
## param         0    -none-     list
model_ridge$bestTune
##     alpha lambda
## 100     0   1000
model_ridge$results
##     alpha       lambda  Accuracy         Kappa AccuracySD     KappaSD
## 1       0 1.000000e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 2       0 1.149757e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 3       0 1.321941e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 4       0 1.519911e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 5       0 1.747528e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 6       0 2.009233e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 7       0 2.310130e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 8       0 2.656088e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 9       0 3.053856e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 10      0 3.511192e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 11      0 4.037017e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 12      0 4.641589e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 13      0 5.336699e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 14      0 6.135907e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 15      0 7.054802e-03 0.7121269  0.1258427836 0.07572218 0.194468365
## 16      0 8.111308e-03 0.7121269  0.1169651679 0.07572218 0.200936892
## 17      0 9.326033e-03 0.7121269  0.1169651679 0.07572218 0.200936892
## 18      0 1.072267e-02 0.7153527  0.1221298234 0.07629121 0.205065701
## 19      0 1.232847e-02 0.7184777  0.1253243157 0.07122880 0.199870862
## 20      0 1.417474e-02 0.7184777  0.1253243157 0.07122880 0.199870862
## 21      0 1.629751e-02 0.7218110  0.1271256941 0.06748299 0.197219040
## 22      0 1.873817e-02 0.7183628  0.0854714633 0.06214758 0.188132846
## 23      0 2.154435e-02 0.7183628  0.0768782408 0.05830815 0.178090376
## 24      0 2.477076e-02 0.7151370  0.0625607588 0.05971733 0.176217588
## 25      0 2.848036e-02 0.7214878  0.0719807111 0.05729897 0.182284601
## 26      0 3.274549e-02 0.7246128  0.0762282095 0.05798117 0.185710696
## 27      0 3.764936e-02 0.7341894  0.0883270492 0.05190164 0.182821808
## 28      0 4.328761e-02 0.7341894  0.0883270492 0.05190164 0.182821808
## 29      0 4.977024e-02 0.7373144  0.0931774185 0.05386309 0.187548454
## 30      0 5.722368e-02 0.7341894  0.0665397603 0.05190164 0.177874535
## 31      0 6.579332e-02 0.7375227  0.0718029182 0.05290132 0.174707017
## 32      0 7.564633e-02 0.7377310  0.0607379698 0.04692488 0.155771114
## 33      0 8.697490e-02 0.7409569  0.0655281988 0.04328580 0.155079558
## 34      0 1.000000e-01 0.7443977  0.0586286257 0.05080588 0.177075998
## 35      0 1.149757e-01 0.7408560  0.0273008904 0.03521268 0.115339013
## 36      0 1.321941e-01 0.7439810  0.0253255616 0.03743112 0.121396682
## 37      0 1.519911e-01 0.7473077  0.0170064644 0.02732543 0.101651793
## 38      0 1.747528e-01 0.7473077  0.0170064644 0.02732543 0.101651793
## 39      0 2.009233e-01 0.7473077  0.0170064644 0.02732543 0.101651793
## 40      0 2.310130e-01 0.7473077  0.0170064644 0.02732543 0.101651793
## 41      0 2.656088e-01 0.7441827 -0.0004317918 0.02873393 0.096205194
## 42      0 3.053856e-01 0.7473077  0.0040477603 0.02301446 0.092227191
## 43      0 3.511192e-01 0.7439810 -0.0200472060 0.03109760 0.026426313
## 44      0 4.037017e-01 0.7439810 -0.0200472060 0.03109760 0.026426313
## 45      0 4.641589e-01 0.7471060 -0.0149377169 0.02982451 0.024635145
## 46      0 5.336699e-01 0.7471060 -0.0149377169 0.02982451 0.024635145
## 47      0 6.135907e-01 0.7504394 -0.0092108447 0.02997653 0.019903277
## 48      0 7.054802e-01 0.7536652 -0.0036789298 0.03068698 0.011633797
## 49      0 8.111308e-01 0.7568910  0.0010638298 0.02234243 0.003364125
## 50      0 9.326033e-01 0.7568910  0.0010638298 0.02234243 0.003364125
## 51      0 1.072267e+00 0.7568910 -0.0052830189 0.02234243 0.016706373
## 52      0 1.232847e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 53      0 1.417474e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 54      0 1.629751e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 55      0 1.873817e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 56      0 2.154435e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 57      0 2.477076e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 58      0 2.848036e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 59      0 3.274549e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 60      0 3.764936e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 61      0 4.328761e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 62      0 4.977024e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 63      0 5.722368e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 64      0 6.579332e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 65      0 7.564633e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 66      0 8.697490e+00 0.7601168  0.0000000000 0.01627249 0.000000000
## 67      0 1.000000e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 68      0 1.149757e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 69      0 1.321941e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 70      0 1.519911e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 71      0 1.747528e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 72      0 2.009233e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 73      0 2.310130e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 74      0 2.656088e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 75      0 3.053856e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 76      0 3.511192e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 77      0 4.037017e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 78      0 4.641589e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 79      0 5.336699e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 80      0 6.135907e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 81      0 7.054802e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 82      0 8.111308e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 83      0 9.326033e+01 0.7601168  0.0000000000 0.01627249 0.000000000
## 84      0 1.072267e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 85      0 1.232847e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 86      0 1.417474e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 87      0 1.629751e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 88      0 1.873817e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 89      0 2.154435e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 90      0 2.477076e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 91      0 2.848036e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 92      0 3.274549e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 93      0 3.764936e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 94      0 4.328761e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 95      0 4.977024e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 96      0 5.722368e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 97      0 6.579332e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 98      0 7.564633e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 99      0 8.697490e+02 0.7601168  0.0000000000 0.01627249 0.000000000
## 100     0 1.000000e+03 0.7601168  0.0000000000 0.01627249 0.000000000
# Visualize accuracy versus values of lambda
plot(model_ridge)

# Obtain accuracy metrics from cross-validation
confusionMatrix(model_ridge)
## Cross-Validated (10 fold) Confusion Matrix 
## 
## (entries are percentual average cell counts across resamples)
##  
##           Reference
## Prediction    1    2    3
##          1  0.0  0.0  0.0
##          2 18.5 76.0  5.5
##          3  0.0  0.0  0.0
##                             
##  Accuracy (average) : 0.7597
varImp(model_ridge)
## glmnet variable importance
## 
##   variables are sorted by maximum importance across the classes
##   only 20 most important variables shown (out of 59)
## 
##              1     2        3
## q532   100.000 88.54 11.46067
## q176    38.301 53.76 92.05772
## q496    45.678 76.45 30.77370
## q896    65.180 53.76 11.42322
## q417    65.180 53.76 11.42322
## q522    65.180 53.76 11.42322
## q502    41.194 56.33 15.13968
## q897    54.938 43.25 11.69065
## q494    54.938 43.25 11.69065
## q873    13.708 54.83 41.12360
## q416    38.553 50.05 11.49837
## q214    38.553 50.05 11.49837
## q215    38.427 49.89 11.46067
## q895    38.301 49.72 11.42322
## q476    44.511 44.59  0.07878
## q174    21.177 39.62 18.44414
## q178     3.122 33.39 30.26588
## q894    20.077 31.85 11.76938
## grade4  31.822 26.31  5.50762
## q893    25.579 30.95  5.37523
plot(varImp(model_ridge))

var_import = varImp(model_ridge)$importance 
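
The stored importance scores can be ranked into a single table; a short sketch, assuming the per-class columns `1`, `2`, `3` shown in the varImp() output above:

var_import %>% 
  rownames_to_column("variable") %>% 
  mutate(max_importance = pmax(`1`, `2`, `3`)) %>% 
  arrange(desc(max_importance)) %>% 
  head(10)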

Arkansas (AR)

df_ar = df_fit_state %>% 
  filter(sitename == "Arkansas (AR)") %>% 
  select(-sitename, -year)


set.seed(123)
# Create a grid of lambda values to search
lambda <- 10^seq(-3,3, length = 100)

# Specify training control
train_control_ridge <- trainControl(method = "cv", number = 10)

model_ridge = train(q85 ~.,
                    df_ar, 
                    method = "glmnet", 
                    trControl = train_control_ridge, 
                    tuneGrid = expand.grid(alpha = 0, lambda = lambda))

summary(model_ridge)
##             Length Class      Mode     
## a0          300    -none-     numeric  
## beta          3    -none-     list     
## dfmat       300    -none-     numeric  
## df          100    -none-     numeric  
## dim           2    -none-     numeric  
## lambda      100    -none-     numeric  
## dev.ratio   100    -none-     numeric  
## nulldev       1    -none-     numeric  
## npasses       1    -none-     numeric  
## jerr          1    -none-     numeric  
## offset        1    -none-     logical  
## classnames    3    -none-     character
## grouped       1    -none-     logical  
## call          5    -none-     call     
## nobs          1    -none-     numeric  
## lambdaOpt     1    -none-     numeric  
## xNames       59    -none-     character
## problemType   1    -none-     character
## tuneValue     2    data.frame list     
## obsLevels     3    -none-     character
## param         0    -none-     list
model_ridge$bestTune
##    alpha    lambda
## 48     0 0.7054802
model_ridge$results
##     alpha       lambda  Accuracy      Kappa AccuracySD    KappaSD
## 1       0 1.000000e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 2       0 1.149757e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 3       0 1.321941e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 4       0 1.519911e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 5       0 1.747528e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 6       0 2.009233e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 7       0 2.310130e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 8       0 2.656088e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 9       0 3.053856e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 10      0 3.511192e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 11      0 4.037017e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 12      0 4.641589e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 13      0 5.336699e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 14      0 6.135907e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 15      0 7.054802e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 16      0 8.111308e-03 0.7106046 0.07197238 0.07432576 0.18270805
## 17      0 9.326033e-03 0.7139379 0.07457583 0.07286950 0.17977364
## 18      0 1.072267e-02 0.7107121 0.05514799 0.06722798 0.15179106
## 19      0 1.232847e-02 0.7140455 0.05839474 0.06746379 0.14910631
## 20      0 1.417474e-02 0.7107121 0.04201176 0.06903995 0.15297713
## 21      0 1.629751e-02 0.7107121 0.04201176 0.06903995 0.15297713
## 22      0 1.873817e-02 0.7171705 0.05137739 0.06885847 0.15787503
## 23      0 2.154435e-02 0.7202955 0.05516781 0.06806688 0.16170122
## 24      0 2.477076e-02 0.7202955 0.05019168 0.06806688 0.15684714
## 25      0 2.848036e-02 0.7202955 0.05019168 0.06806688 0.15684714
## 26      0 3.274549e-02 0.7269621 0.05994778 0.06911492 0.14949759
## 27      0 3.764936e-02 0.7269621 0.05994778 0.06911492 0.14949759
## 28      0 4.328761e-02 0.7301879 0.06383109 0.06374404 0.14642601
## 29      0 4.977024e-02 0.7335213 0.04518530 0.06666804 0.15450072
## 30      0 5.722368e-02 0.7367471 0.04948644 0.06257220 0.15295085
## 31      0 6.579332e-02 0.7332988 0.03005171 0.06825892 0.17295178
## 32      0 7.564633e-02 0.7332988 0.03005171 0.06825892 0.17295178
## 33      0 8.697490e-02 0.7434137 0.04512439 0.06564412 0.18400634
## 34      0 1.000000e-01 0.7434137 0.03355268 0.06907700 0.19090222
## 35      0 1.149757e-01 0.7434137 0.03355268 0.06907700 0.19090222
## 36      0 1.321941e-01 0.7468479 0.02689049 0.07205638 0.18305397
## 37      0 1.519911e-01 0.7498654 0.01533498 0.05916898 0.13315382
## 38      0 1.747528e-01 0.7533136 0.01874747 0.05379831 0.12795989
## 39      0 2.009233e-01 0.7597720 0.02847240 0.04944629 0.12744797
## 40      0 2.310130e-01 0.7629978 0.03311846 0.04425667 0.12304367
## 41      0 2.656088e-01 0.7661228 0.03903787 0.04434013 0.12913654
## 42      0 3.053856e-01 0.7729044 0.04829852 0.03662892 0.11983995
## 43      0 3.511192e-01 0.7695711 0.02367541 0.03170437 0.09794951
## 44      0 4.037017e-01 0.7762451 0.03370545 0.02063786 0.08825964
## 45      0 4.641589e-01 0.7730193 0.01251901 0.01770439 0.06237402
## 46      0 5.336699e-01 0.7730193 0.01251901 0.01770439 0.06237402
## 47      0 6.135907e-01 0.7730193 0.01251901 0.01770439 0.06237402
## 48      0 7.054802e-01 0.7764676 0.01824818 0.01794484 0.05770580
## 49      0 8.111308e-01 0.7733426 0.00000000 0.01965865 0.00000000
## 50      0 9.326033e-01 0.7733426 0.00000000 0.01965865 0.00000000
## 51      0 1.072267e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 52      0 1.232847e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 53      0 1.417474e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 54      0 1.629751e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 55      0 1.873817e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 56      0 2.154435e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 57      0 2.477076e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 58      0 2.848036e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 59      0 3.274549e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 60      0 3.764936e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 61      0 4.328761e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 62      0 4.977024e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 63      0 5.722368e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 64      0 6.579332e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 65      0 7.564633e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 66      0 8.697490e+00 0.7733426 0.00000000 0.01965865 0.00000000
## 67      0 1.000000e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 68      0 1.149757e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 69      0 1.321941e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 70      0 1.519911e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 71      0 1.747528e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 72      0 2.009233e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 73      0 2.310130e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 74      0 2.656088e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 75      0 3.053856e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 76      0 3.511192e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 77      0 4.037017e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 78      0 4.641589e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 79      0 5.336699e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 80      0 6.135907e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 81      0 7.054802e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 82      0 8.111308e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 83      0 9.326033e+01 0.7733426 0.00000000 0.01965865 0.00000000
## 84      0 1.072267e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 85      0 1.232847e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 86      0 1.417474e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 87      0 1.629751e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 88      0 1.873817e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 89      0 2.154435e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 90      0 2.477076e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 91      0 2.848036e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 92      0 3.274549e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 93      0 3.764936e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 94      0 4.328761e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 95      0 4.977024e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 96      0 5.722368e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 97      0 6.579332e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 98      0 7.564633e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 99      0 8.697490e+02 0.7733426 0.00000000 0.01965865 0.00000000
## 100     0 1.000000e+03 0.7733426 0.00000000 0.01965865 0.00000000
# Visualize accuracy versus values of lambda
plot(model_ridge)

# Obtain accuracy metrics from cross-validation
confusionMatrix(model_ridge)
## Cross-Validated (10 fold) Confusion Matrix 
## 
## (entries are percentual average cell counts across resamples)
##  
##           Reference
## Prediction    1    2    3
##          1  0.3  0.0  0.0
##          2 16.8 77.3  5.6
##          3  0.0  0.0  0.0
##                             
##  Accuracy (average) : 0.7763
varImp(model_ridge)
## glmnet variable importance
## 
##   variables are sorted by maximum importance across the classes
##   only 20 most important variables shown (out of 59)
## 
##                1       2       3
## q174      54.613 45.3870 100.000
## q494      77.579 58.3534  19.225
## age3      75.458 57.8754  17.583
## q894      73.717 52.7120  21.005
## q215      51.725 66.1565  14.432
## q873      18.312 46.6983  65.010
## q416      47.770 64.6242  16.854
## q896      53.235 63.5035  10.269
## q417      53.126 63.4076  10.282
## q895      43.745 62.9165  19.172
## q175       8.420 62.7269  54.307
## q496      46.864 58.7002  11.837
## q522      56.116 38.4396  17.676
## q176      40.610 54.7216  14.112
## q214       1.268 52.7095  53.978
## q502      50.714 30.5341  20.180
## q532      46.493 29.3122  17.181
## q474      46.056 37.3610   8.695
## qnothhpl2 44.506 33.6007  10.905
## q897      41.820  0.5458  41.274
plot(varImp(model_ridge))

var_import = varImp(model_ridge)$importance

Illinois (IL)

df_il = df_fit_state %>% 
  filter(sitename == "Illinois (IL)") %>% 
  select(-sitename, -year)


set.seed(123)
# Create a grid of lambda values to search
lambda <- 10^seq(-3,3, length = 100)

# Specify training control
train_control_ridge <- trainControl(method = "cv", number = 10)

model_ridge = train(q85 ~.,
                    df_il , 
                    method = "glmnet", 
                    trControl = train_control_ridge, 
                    tuneGrid = expand.grid(alpha = 0, lambda = lambda))

summary(model_ridge)
##             Length Class      Mode     
## a0          300    -none-     numeric  
## beta          3    -none-     list     
## dfmat       300    -none-     numeric  
## df          100    -none-     numeric  
## dim           2    -none-     numeric  
## lambda      100    -none-     numeric  
## dev.ratio   100    -none-     numeric  
## nulldev       1    -none-     numeric  
## npasses       1    -none-     numeric  
## jerr          1    -none-     numeric  
## offset        1    -none-     logical  
## classnames    3    -none-     character
## grouped       1    -none-     logical  
## call          5    -none-     call     
## nobs          1    -none-     numeric  
## lambdaOpt     1    -none-     numeric  
## xNames       59    -none-     character
## problemType   1    -none-     character
## tuneValue     2    data.frame list     
## obsLevels     3    -none-     character
## param         0    -none-     list
model_ridge$bestTune
##    alpha    lambda
## 45     0 0.4641589
model_ridge$results
##     alpha       lambda  Accuracy       Kappa AccuracySD    KappaSD
## 1       0 1.000000e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 2       0 1.149757e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 3       0 1.321941e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 4       0 1.519911e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 5       0 1.747528e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 6       0 2.009233e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 7       0 2.310130e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 8       0 2.656088e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 9       0 3.053856e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 10      0 3.511192e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 11      0 4.037017e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 12      0 4.641589e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 13      0 5.336699e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 14      0 6.135907e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 15      0 7.054802e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 16      0 8.111308e-03 0.6830435 0.075621632 0.05154774 0.15361880
## 17      0 9.326033e-03 0.6830435 0.071014482 0.05154774 0.15684991
## 18      0 1.072267e-02 0.6830435 0.071014482 0.05154774 0.15684991
## 19      0 1.232847e-02 0.6830435 0.071014482 0.05154774 0.15684991
## 20      0 1.417474e-02 0.6830435 0.071014482 0.05154774 0.15684991
## 21      0 1.629751e-02 0.6830435 0.071014482 0.05154774 0.15684991
## 22      0 1.873817e-02 0.6856751 0.074443377 0.04766094 0.15317217
## 23      0 2.154435e-02 0.6856751 0.067849246 0.04766094 0.15191778
## 24      0 2.477076e-02 0.6881751 0.071805445 0.05187896 0.15978176
## 25      0 2.848036e-02 0.6881751 0.071805445 0.05187896 0.15978176
## 26      0 3.274549e-02 0.6909318 0.066856424 0.05091452 0.16722120
## 27      0 3.764936e-02 0.6909318 0.066856424 0.05091452 0.16722120
## 28      0 4.328761e-02 0.6934318 0.071274095 0.05065845 0.16452377
## 29      0 4.977024e-02 0.6909318 0.061968221 0.04664356 0.14761148
## 30      0 5.722368e-02 0.6909318 0.053153680 0.04810936 0.14382683
## 31      0 6.579332e-02 0.7010024 0.068269370 0.03917493 0.13540297
## 32      0 7.564633e-02 0.7035024 0.072399448 0.04481376 0.14555840
## 33      0 8.697490e-02 0.7085665 0.077331699 0.04188124 0.13943342
## 34      0 1.000000e-01 0.7085665 0.078881217 0.04948214 0.15586243
## 35      0 1.149757e-01 0.7060665 0.066200058 0.04919029 0.16054829
## 36      0 1.321941e-01 0.7085665 0.071077682 0.05086621 0.16493675
## 37      0 1.519911e-01 0.7085055 0.064674901 0.04089860 0.13483324
## 38      0 1.747528e-01 0.7110055 0.068948405 0.04108458 0.13712630
## 39      0 2.009233e-01 0.7135055 0.060222728 0.04275734 0.15168970
## 40      0 2.310130e-01 0.7160055 0.064570554 0.04261056 0.14898610
## 41      0 2.656088e-01 0.7211371 0.073609703 0.04238899 0.14581316
## 42      0 3.053856e-01 0.7186371 0.053638735 0.03381451 0.12268306
## 43      0 3.511192e-01 0.7212686 0.058400640 0.03413564 0.11919051
## 44      0 4.037017e-01 0.7262077 0.059774055 0.02855714 0.10726575
## 45      0 4.641589e-01 0.7287718 0.064238340 0.02623155 0.10329258
## 46      0 5.336699e-01 0.7212718 0.026508670 0.01847253 0.07623700
## 47      0 6.135907e-01 0.7187718 0.013827510 0.01957095 0.07628145
## 48      0 7.054802e-01 0.7162718 0.002993477 0.01649339 0.04373988
## 49      0 8.111308e-01 0.7212108 0.011646586 0.01045658 0.03682974
## 50      0 9.326033e-01 0.7212108 0.011646586 0.01045658 0.03682974
## 51      0 1.072267e+00 0.7212108 0.011646586 0.01045658 0.03682974
## 52      0 1.232847e+00 0.7212108 0.011646586 0.01045658 0.03682974
## 53      0 1.417474e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 54      0 1.629751e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 55      0 1.873817e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 56      0 2.154435e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 57      0 2.477076e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 58      0 2.848036e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 59      0 3.274549e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 60      0 3.764936e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 61      0 4.328761e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 62      0 4.977024e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 63      0 5.722368e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 64      0 6.579332e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 65      0 7.564633e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 66      0 8.697490e+00 0.7187108 0.000000000 0.01227961 0.00000000
## 67      0 1.000000e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 68      0 1.149757e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 69      0 1.321941e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 70      0 1.519911e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 71      0 1.747528e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 72      0 2.009233e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 73      0 2.310130e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 74      0 2.656088e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 75      0 3.053856e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 76      0 3.511192e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 77      0 4.037017e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 78      0 4.641589e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 79      0 5.336699e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 80      0 6.135907e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 81      0 7.054802e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 82      0 8.111308e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 83      0 9.326033e+01 0.7187108 0.000000000 0.01227961 0.00000000
## 84      0 1.072267e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 85      0 1.232847e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 86      0 1.417474e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 87      0 1.629751e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 88      0 1.873817e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 89      0 2.154435e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 90      0 2.477076e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 91      0 2.848036e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 92      0 3.274549e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 93      0 3.764936e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 94      0 4.328761e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 95      0 4.977024e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 96      0 5.722368e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 97      0 6.579332e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 98      0 7.564633e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 99      0 8.697490e+02 0.7187108 0.000000000 0.01227961 0.00000000
## 100     0 1.000000e+03 0.7187108 0.000000000 0.01227961 0.00000000
# Visualize accuracy versus values of lambda
plot(model_ridge)

# Obtain accuracy metrics from cross-validation
confusionMatrix(model_ridge)
## Cross-Validated (10 fold) Confusion Matrix 
## 
## (entries are percentual average cell counts across resamples)
##  
##           Reference
## Prediction    1    2    3
##          1  1.5  0.5  0.0
##          2 22.1 71.4  4.5
##          3  0.0  0.0  0.0
##                             
##  Accuracy (average) : 0.7286
varImp(model_ridge)
## glmnet variable importance
## 
##   variables are sorted by maximum importance across the classes
##   only 20 most important variables shown (out of 59)
## 
##                 1      2      3
## q177      100.000 90.572  9.428
## q416       64.656 57.699  6.957
## q417       48.763 56.114  7.352
## q494       54.713 42.876 11.837
## q216       34.966 43.492  8.526
## q176       33.229 41.412  8.182
## q873       15.838 22.117 37.955
## q532       36.009 27.497  8.512
## q178        2.857 30.540 27.683
## q174       18.561 26.897  8.335
## q897       26.822 15.530 11.292
## qnothhpl2  26.092 23.237  2.855
## q895        8.301 25.462 17.161
## q654       24.102 12.478 11.624
## q175       23.896 14.048  9.848
## q414       13.262 23.554 10.292
## q495       21.810 12.865  8.945
## q496       19.636  8.991 10.645
## q302        7.286 17.028  9.742
## q415       16.537  5.455 11.082
plot(varImp(model_ridge))

var_import = varImp(model_ridge)$importance

Iowa (IA)

df_ia = df_fit_state %>% 
  filter(sitename == "Iowa (IA)") %>% 
  select(-sitename, -year)


set.seed(123)
# Create a grid of lambda values to search
lambda <- 10^seq(-3,3, length = 100)

# Specify training control
train_control_ridge <- trainControl(method = "cv", number = 10)

model_ridge = train(q85 ~.,
                    df_ia, 
                    method = "glmnet", 
                    trControl = train_control_ridge, 
                    tuneGrid = expand.grid(alpha = 0, lambda = lambda))
## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground
summary(model_ridge)
##             Length Class      Mode     
## a0          300    -none-     numeric  
## beta          3    -none-     list     
## dfmat       300    -none-     numeric  
## df          100    -none-     numeric  
## dim           2    -none-     numeric  
## lambda      100    -none-     numeric  
## dev.ratio   100    -none-     numeric  
## nulldev       1    -none-     numeric  
## npasses       1    -none-     numeric  
## jerr          1    -none-     numeric  
## offset        1    -none-     logical  
## classnames    3    -none-     character
## grouped       1    -none-     logical  
## call          5    -none-     call     
## nobs          1    -none-     numeric  
## lambdaOpt     1    -none-     numeric  
## xNames       59    -none-     character
## problemType   1    -none-     character
## tuneValue     2    data.frame list     
## obsLevels     3    -none-     character
## param         0    -none-     list
model_ridge$bestTune
##     alpha lambda
## 100     0   1000
model_ridge$results
##     alpha       lambda  Accuracy        Kappa AccuracySD    KappaSD
## 1       0 1.000000e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 2       0 1.149757e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 3       0 1.321941e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 4       0 1.519911e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 5       0 1.747528e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 6       0 2.009233e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 7       0 2.310130e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 8       0 2.656088e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 9       0 3.053856e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 10      0 3.511192e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 11      0 4.037017e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 12      0 4.641589e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 13      0 5.336699e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 14      0 6.135907e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 15      0 7.054802e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 16      0 8.111308e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 17      0 9.326033e-03 0.7571476  0.096228747 0.03027599 0.19540107
## 18      0 1.072267e-02 0.7571476  0.096228747 0.03027599 0.19540107
## 19      0 1.232847e-02 0.7571476  0.096228747 0.03027599 0.19540107
## 20      0 1.417474e-02 0.7614954  0.103168402 0.03052201 0.19824134
## 21      0 1.629751e-02 0.7614954  0.081835069 0.02336216 0.19654034
## 22      0 1.873817e-02 0.7614954  0.081835069 0.02336216 0.19654034
## 23      0 2.154435e-02 0.7614954  0.069875572 0.02336216 0.19166295
## 24      0 2.477076e-02 0.7618742  0.038859697 0.01852170 0.16583864
## 25      0 2.848036e-02 0.7618742  0.028193030 0.01852170 0.16915437
## 26      0 3.274549e-02 0.7618742  0.016398158 0.01852170 0.12927355
## 27      0 3.764936e-02 0.7618742  0.016398158 0.01852170 0.12927355
## 28      0 4.328761e-02 0.7790843  0.046600935 0.03013221 0.12800982
## 29      0 4.977024e-02 0.7790843  0.039328207 0.03013221 0.12015100
## 30      0 5.722368e-02 0.7790843  0.039328207 0.03013221 0.12015100
## 31      0 6.579332e-02 0.7790843  0.039328207 0.03013221 0.12015100
## 32      0 7.564633e-02 0.7790843  0.039328207 0.03013221 0.12015100
## 33      0 8.697490e-02 0.7790843  0.039328207 0.03013221 0.12015100
## 34      0 1.000000e-01 0.7832510  0.045994874 0.02850021 0.11537077
## 35      0 1.149757e-01 0.7832510  0.045994874 0.02850021 0.11537077
## 36      0 1.321941e-01 0.7832510  0.045994874 0.02850021 0.11537077
## 37      0 1.519911e-01 0.7874177  0.053267601 0.02603831 0.12309720
## 38      0 1.747528e-01 0.7874177  0.053267601 0.02603831 0.12309720
## 39      0 2.009233e-01 0.7915843  0.062156490 0.02984874 0.13726421
## 40      0 2.310130e-01 0.7915843  0.062156490 0.02984874 0.13726421
## 41      0 2.656088e-01 0.7915843  0.062156490 0.02984874 0.13726421
## 42      0 3.053856e-01 0.7832510  0.018924540 0.03979782 0.10139990
## 43      0 3.511192e-01 0.7832510  0.018924540 0.03979782 0.10139990
## 44      0 4.037017e-01 0.7832510 -0.003398693 0.02850021 0.05036102
## 45      0 4.641589e-01 0.7832510 -0.003398693 0.02850021 0.05036102
## 46      0 5.336699e-01 0.7832510 -0.003398693 0.02850021 0.05036102
## 47      0 6.135907e-01 0.7832510 -0.003398693 0.02850021 0.05036102
## 48      0 7.054802e-01 0.7832510 -0.021972490 0.02850021 0.03549208
## 49      0 8.111308e-01 0.7832510 -0.021972490 0.02850021 0.03549208
## 50      0 9.326033e-01 0.7832510 -0.021972490 0.02850021 0.03549208
## 51      0 1.072267e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 52      0 1.232847e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 53      0 1.417474e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 54      0 1.629751e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 55      0 1.873817e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 56      0 2.154435e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 57      0 2.477076e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 58      0 2.848036e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 59      0 3.274549e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 60      0 3.764936e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 61      0 4.328761e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 62      0 4.977024e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 63      0 5.722368e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 64      0 6.579332e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 65      0 7.564633e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 66      0 8.697490e+00 0.7961298  0.000000000 0.02283158 0.00000000
## 67      0 1.000000e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 68      0 1.149757e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 69      0 1.321941e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 70      0 1.519911e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 71      0 1.747528e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 72      0 2.009233e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 73      0 2.310130e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 74      0 2.656088e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 75      0 3.053856e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 76      0 3.511192e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 77      0 4.037017e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 78      0 4.641589e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 79      0 5.336699e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 80      0 6.135907e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 81      0 7.054802e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 82      0 8.111308e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 83      0 9.326033e+01 0.7961298  0.000000000 0.02283158 0.00000000
## 84      0 1.072267e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 85      0 1.232847e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 86      0 1.417474e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 87      0 1.629751e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 88      0 1.873817e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 89      0 2.154435e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 90      0 2.477076e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 91      0 2.848036e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 92      0 3.274549e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 93      0 3.764936e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 94      0 4.328761e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 95      0 4.977024e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 96      0 5.722368e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 97      0 6.579332e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 98      0 7.564633e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 99      0 8.697490e+02 0.7961298  0.000000000 0.02283158 0.00000000
## 100     0 1.000000e+03 0.7961298  0.000000000 0.02283158 0.00000000
# Visualize accuracy versus values of lambda
plot(model_ridge)

# Obtain accuracy metrics from cross-validation
confusionMatrix(model_ridge)
## Cross-Validated (10 fold) Confusion Matrix 
## 
## (entries are percentual average cell counts across resamples)
##  
##           Reference
## Prediction    1    2    3
##          1  0.0  0.0  0.0
##          2 17.9 79.6  2.6
##          3  0.0  0.0  0.0
##                             
##  Accuracy (average) : 0.7957
varImp(model_ridge)
## glmnet variable importance
## 
##   variables are sorted by maximum importance across the classes
##   only 20 most important variables shown (out of 59)
## 
##           1     2     3
## q522 100.00 96.89 3.109
## q178 100.00 96.89 3.109
## q532 100.00 96.89 3.109
## q176 100.00 96.89 3.109
## q417  70.46 67.31 3.149
## q654  51.70 48.43 3.262
## q496  39.63 36.48 3.149
## q897  39.29 36.16 3.122
## q216  39.29 36.16 3.122
## q415  31.22 28.03 3.191
## q502  30.40 27.14 3.262
## q174  22.43 25.64 3.205
## q495  22.14 25.30 3.163
## q175  21.86 24.98 3.122
## q416  21.86 24.98 3.122
## q896  21.86 24.98 3.122
## q215  21.76 24.87 3.109
## q476  23.00 19.71 3.292
## q473  20.02 16.72 3.307
## q494  19.49 16.27 3.219
plot(varImp(model_ridge))

var_import = varImp(model_ridge)$importance

Kentucky (KY)

df_ky = df_fit_state %>% 
  filter(sitename == "Kentucky (KY)") %>% 
  select(-sitename, -year)


set.seed(123)
#Create grid to search lambda
lambda <- 10^seq(-3, 3, length = 100)

# Specify training control
train_control_ridge <- trainControl(method = "cv", number = 10)

model_ridge = train(q85 ~ .,
                    df_ky, 
                    method = "glmnet", 
                    trControl = train_control_ridge, 
                    tuneGrid = expand.grid(alpha = 0, lambda = lambda))

summary(model_ridge)
##             Length Class      Mode     
## a0          300    -none-     numeric  
## beta          3    -none-     list     
## dfmat       300    -none-     numeric  
## df          100    -none-     numeric  
## dim           2    -none-     numeric  
## lambda      100    -none-     numeric  
## dev.ratio   100    -none-     numeric  
## nulldev       1    -none-     numeric  
## npasses       1    -none-     numeric  
## jerr          1    -none-     numeric  
## offset        1    -none-     logical  
## classnames    3    -none-     character
## grouped       1    -none-     logical  
## call          5    -none-     call     
## nobs          1    -none-     numeric  
## lambdaOpt     1    -none-     numeric  
## xNames       59    -none-     character
## problemType   1    -none-     character
## tuneValue     2    data.frame list     
## obsLevels     3    -none-     character
## param         0    -none-     list
model_ridge$bestTune
##     alpha lambda
## 100     0   1000
model_ridge$results
##     alpha       lambda  Accuracy        Kappa  AccuracySD    KappaSD
## 1       0 1.000000e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 2       0 1.149757e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 3       0 1.321941e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 4       0 1.519911e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 5       0 1.747528e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 6       0 2.009233e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 7       0 2.310130e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 8       0 2.656088e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 9       0 3.053856e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 10      0 3.511192e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 11      0 4.037017e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 12      0 4.641589e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 13      0 5.336699e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 14      0 6.135907e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 15      0 7.054802e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 16      0 8.111308e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 17      0 9.326033e-03 0.6688600  0.020377824 0.081694171 0.15213734
## 18      0 1.072267e-02 0.6657350  0.009980198 0.086661437 0.15247153
## 19      0 1.232847e-02 0.6717956  0.015364814 0.079104840 0.14689177
## 20      0 1.417474e-02 0.6717956  0.015364814 0.079104840 0.14689177
## 21      0 1.629751e-02 0.6717956  0.003667931 0.073024744 0.10976474
## 22      0 1.873817e-02 0.6750214  0.006847010 0.074006658 0.10787139
## 23      0 2.154435e-02 0.6750214  0.006847010 0.074006658 0.10787139
## 24      0 2.477076e-02 0.6782472 -0.001108194 0.073259763 0.09806450
## 25      0 2.848036e-02 0.6751222 -0.019077187 0.083354329 0.11768263
## 26      0 3.274549e-02 0.6783480 -0.014959442 0.082687538 0.11361323
## 27      0 3.764936e-02 0.6815738 -0.010726980 0.081874231 0.11173048
## 28      0 4.328761e-02 0.6847996 -0.004670510 0.085089060 0.12090317
## 29      0 4.977024e-02 0.6909549  0.012167209 0.074799922 0.10346647
## 30      0 5.722368e-02 0.6939791  0.003407853 0.065151059 0.11380246
## 31      0 6.579332e-02 0.6938783 -0.010980597 0.056548448 0.09409698
## 32      0 7.564633e-02 0.6938783 -0.010980597 0.056548448 0.09409698
## 33      0 8.697490e-02 0.6938783 -0.010980597 0.056548448 0.09409698
## 34      0 1.000000e-01 0.6876222 -0.041533219 0.060381610 0.10270060
## 35      0 1.149757e-01 0.6938783 -0.047886022 0.047673521 0.05547071
## 36      0 1.321941e-01 0.6938783 -0.047886022 0.047673521 0.05547071
## 37      0 1.519911e-01 0.6938783 -0.047886022 0.047673521 0.05547071
## 38      0 1.747528e-01 0.6969086 -0.044074518 0.044466705 0.05041797
## 39      0 2.009233e-01 0.7064852 -0.030608179 0.037899244 0.05223278
## 40      0 2.310130e-01 0.7129368 -0.019973896 0.040830277 0.05882606
## 41      0 2.656088e-01 0.7129368 -0.019973896 0.040830277 0.05882606
## 42      0 3.053856e-01 0.7129368 -0.026388990 0.040830277 0.05169890
## 43      0 3.511192e-01 0.7256384 -0.008379081 0.029267647 0.05619614
## 44      0 4.037017e-01 0.7257392 -0.019210061 0.027980189 0.03272052
## 45      0 4.641589e-01 0.7257392 -0.019210061 0.027980189 0.03272052
## 46      0 5.336699e-01 0.7319954 -0.009679938 0.021074162 0.02041042
## 47      0 6.135907e-01 0.7319954 -0.009679938 0.021074162 0.02041042
## 48      0 7.054802e-01 0.7319954 -0.009679938 0.021074162 0.02041042
## 49      0 8.111308e-01 0.7350257 -0.004918033 0.017322951 0.01555219
## 50      0 9.326033e-01 0.7350257 -0.004918033 0.017322951 0.01555219
## 51      0 1.072267e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 52      0 1.232847e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 53      0 1.417474e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 54      0 1.629751e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 55      0 1.873817e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 56      0 2.154435e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 57      0 2.477076e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 58      0 2.848036e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 59      0 3.274549e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 60      0 3.764936e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 61      0 4.328761e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 62      0 4.977024e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 63      0 5.722368e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 64      0 6.579332e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 65      0 7.564633e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 66      0 8.697490e+00 0.7381507  0.000000000 0.008228097 0.00000000
## 67      0 1.000000e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 68      0 1.149757e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 69      0 1.321941e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 70      0 1.519911e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 71      0 1.747528e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 72      0 2.009233e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 73      0 2.310130e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 74      0 2.656088e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 75      0 3.053856e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 76      0 3.511192e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 77      0 4.037017e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 78      0 4.641589e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 79      0 5.336699e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 80      0 6.135907e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 81      0 7.054802e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 82      0 8.111308e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 83      0 9.326033e+01 0.7381507  0.000000000 0.008228097 0.00000000
## 84      0 1.072267e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 85      0 1.232847e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 86      0 1.417474e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 87      0 1.629751e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 88      0 1.873817e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 89      0 2.154435e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 90      0 2.477076e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 91      0 2.848036e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 92      0 3.274549e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 93      0 3.764936e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 94      0 4.328761e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 95      0 4.977024e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 96      0 5.722368e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 97      0 6.579332e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 98      0 7.564633e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 99      0 8.697490e+02 0.7381507  0.000000000 0.008228097 0.00000000
## 100     0 1.000000e+03 0.7381507  0.000000000 0.008228097 0.00000000
#Visualize accuracy across the lambda grid
plot(model_ridge)

The selected lambda (1000) sits at the upper edge of the search grid, where kappa is 0, so for Kentucky the ridge model again reduces to predicting the majority class.

#Obtain cross-validated accuracy metrics
confusionMatrix(model_ridge)
## Cross-Validated (10 fold) Confusion Matrix 
## 
## (entries are percentual average cell counts across resamples)
##  
##           Reference
## Prediction    1    2    3
##          1  0.0  0.0  0.0
##          2 19.2 73.8  7.0
##          3  0.0  0.0  0.0
##                            
##  Accuracy (average) : 0.738
varImp(model_ridge)
## glmnet variable importance
## 
##   variables are sorted by maximum importance across the classes
##   only 20 most important variables shown (out of 59)
## 
##             1       2       3
## q416   41.775 100.000 58.2251
## q532   64.565  55.011  9.5545
## q495   42.045  32.460  9.5855
## q522   41.775  32.251  9.5238
## q215   41.775  32.251  9.5238
## q174   26.142  35.728  9.5855
## q895   26.058  35.612  9.5545
## q176   25.974  35.498  9.5238
## q896   25.891  35.384  9.4933
## q177   25.891  35.384  9.4933
## q873    8.056  32.886 24.8297
## q496   25.326  32.886  7.5593
## q897   26.485   1.655 24.8297
## q214   26.398  16.405  9.9937
## q654   26.327  15.970 10.3563
## q213   23.992  14.185  9.8076
## race44 23.318  23.479  0.1608
## q178   12.948  22.941  9.9937
## q892   20.595  19.431  1.1637
## q474   20.029  10.090  9.9393
plot(varImp(model_ridge))

var_import = varImp(model_ridge)$importance
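
The same tuning pipeline is repeated verbatim for every state, so it could be wrapped in a small helper to cut the duplication. A sketch under the assumption that each state uses the identical grid and 10-fold CV (fit_ridge_state is a hypothetical name, not part of the analysis above):

# hypothetical helper: fit the ridge model for a single state
fit_ridge_state <- function(state_name, data = df_fit_state,
                            lambda = 10^seq(-3, 3, length = 100)) {
  df_state <- data %>% 
    filter(sitename == state_name) %>% 
    select(-sitename, -year)
  set.seed(123)
  train(q85 ~ ., df_state,
        method = "glmnet",
        trControl = trainControl(method = "cv", number = 10),
        tuneGrid = expand.grid(alpha = 0, lambda = lambda))
}

# e.g. model_mi <- fit_ridge_state("Michigan (MI)")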

Michigan (MI)

df_mi = df_fit_state %>% 
  filter(sitename == "Michigan (MI)") %>% 
  select(-sitename, -year)


set.seed(123)
#Create grid to search lambda
lambda <- 10^seq(-3, 3, length = 100)

# Specify training control
train_control_ridge <- trainControl(method = "cv", number = 10)

model_ridge = train(q85 ~ .,
                    df_mi,
                    method = "glmnet", 
                    trControl = train_control_ridge, 
                    tuneGrid = expand.grid(alpha = 0, lambda = lambda))

summary(model_ridge)
##             Length Class      Mode     
## a0          300    -none-     numeric  
## beta          3    -none-     list     
## dfmat       300    -none-     numeric  
## df          100    -none-     numeric  
## dim           2    -none-     numeric  
## lambda      100    -none-     numeric  
## dev.ratio   100    -none-     numeric  
## nulldev       1    -none-     numeric  
## npasses       1    -none-     numeric  
## jerr          1    -none-     numeric  
## offset        1    -none-     logical  
## classnames    3    -none-     character
## grouped       1    -none-     logical  
## call          5    -none-     call     
## nobs          1    -none-     numeric  
## lambdaOpt     1    -none-     numeric  
## xNames       59    -none-     character
## problemType   1    -none-     character
## tuneValue     2    data.frame list     
## obsLevels     3    -none-     character
## param         0    -none-     list
model_ridge$bestTune
##    alpha    lambda
## 44     0 0.4037017
model_ridge$results
##     alpha       lambda  Accuracy       Kappa  AccuracySD    KappaSD
## 1       0 1.000000e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 2       0 1.149757e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 3       0 1.321941e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 4       0 1.519911e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 5       0 1.747528e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 6       0 2.009233e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 7       0 2.310130e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 8       0 2.656088e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 9       0 3.053856e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 10      0 3.511192e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 11      0 4.037017e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 12      0 4.641589e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 13      0 5.336699e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 14      0 6.135907e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 15      0 7.054802e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 16      0 8.111308e-03 0.6514632 0.112869849 0.034069020 0.08977637
## 17      0 9.326033e-03 0.6514632 0.110493197 0.034069020 0.08925632
## 18      0 1.072267e-02 0.6514632 0.110493197 0.034069020 0.08925632
## 19      0 1.232847e-02 0.6514632 0.110493197 0.034069020 0.08925632
## 20      0 1.417474e-02 0.6514632 0.110493197 0.034069020 0.08925632
## 21      0 1.629751e-02 0.6514037 0.107237127 0.027257613 0.07227399
## 22      0 1.873817e-02 0.6496493 0.100747086 0.030095969 0.08092008
## 23      0 2.154435e-02 0.6496493 0.100747086 0.030095969 0.08092008
## 24      0 2.477076e-02 0.6513140 0.099864477 0.030475122 0.07524785
## 25      0 2.848036e-02 0.6513140 0.096513566 0.029331484 0.07165150
## 26      0 3.274549e-02 0.6513140 0.094224273 0.029331484 0.06881953
## 27      0 3.764936e-02 0.6495596 0.085360298 0.029994288 0.08215066
## 28      0 4.328761e-02 0.6495596 0.083750520 0.029994288 0.08070521
## 29      0 4.977024e-02 0.6512545 0.084768516 0.029049291 0.07940460
## 30      0 5.722368e-02 0.6599065 0.095940458 0.033445269 0.08627548
## 31      0 6.579332e-02 0.6599065 0.095940458 0.033445269 0.08627548
## 32      0 7.564633e-02 0.6615419 0.093217009 0.029450821 0.07952456
## 33      0 8.697490e-02 0.6650205 0.095667762 0.029165914 0.08021368
## 34      0 1.000000e-01 0.6684103 0.098303513 0.030584339 0.08092996
## 35      0 1.149757e-01 0.6667154 0.084206422 0.029323339 0.08530491
## 36      0 1.321941e-01 0.6719191 0.091546644 0.031105062 0.08874658
## 37      0 1.519911e-01 0.6736734 0.092653121 0.033385935 0.09651339
## 38      0 1.747528e-01 0.6754278 0.085377519 0.030514343 0.08241558
## 39      0 2.009233e-01 0.6822670 0.097764758 0.033567157 0.08746708
## 40      0 2.310130e-01 0.6805126 0.089109459 0.031830194 0.08269646
## 41      0 2.656088e-01 0.6788771 0.078692432 0.034788261 0.09253019
## 42      0 3.053856e-01 0.6787582 0.066100266 0.022677204 0.06680470
## 43      0 3.511192e-01 0.6838429 0.070742845 0.013758788 0.05059500
## 44      0 4.037017e-01 0.6838732 0.067088517 0.015352370 0.04888711
## 45      0 4.641589e-01 0.6803644 0.040442795 0.014964758 0.04997134
## 46      0 5.336699e-01 0.6838429 0.042487842 0.015910464 0.05868352
## 47      0 6.135907e-01 0.6838137 0.034449269 0.014486132 0.05173942
## 48      0 7.054802e-01 0.6821480 0.023276666 0.010606101 0.03918748
## 49      0 8.111308e-01 0.6822075 0.019170614 0.008376135 0.03218248
## 50      0 9.326033e-01 0.6804834 0.011977163 0.011722197 0.03453117
## 51      0 1.072267e+00 0.6787592 0.004694748 0.011488923 0.02716692
## 52      0 1.232847e+00 0.6787592 0.004694748 0.011488923 0.02716692
## 53      0 1.417474e+00 0.6787592 0.004694748 0.011488923 0.02716692
## 54      0 1.629751e+00 0.6787592 0.004694748 0.011488923 0.02716692
## 55      0 1.873817e+00 0.6804834 0.007714286 0.008446951 0.02439471
## 56      0 2.154435e+00 0.6804834 0.007714286 0.008446951 0.02439471
## 57      0 2.477076e+00 0.6804834 0.007714286 0.008446951 0.02439471
## 58      0 2.848036e+00 0.6787290 0.000000000 0.004381972 0.00000000
## 59      0 3.274549e+00 0.6787290 0.000000000 0.004381972 0.00000000
## 60      0 3.764936e+00 0.6787290 0.000000000 0.004381972 0.00000000
## 61      0 4.328761e+00 0.6787290 0.000000000 0.004381972 0.00000000
## 62      0 4.977024e+00 0.6787290 0.000000000 0.004381972 0.00000000
## 63      0 5.722368e+00 0.6787290 0.000000000 0.004381972 0.00000000
## 64      0 6.579332e+00 0.6787290 0.000000000 0.004381972 0.00000000
## 65      0 7.564633e+00 0.6787290 0.000000000 0.004381972 0.00000000
## 66      0 8.697490e+00 0.6787290 0.000000000 0.004381972 0.00000000
## 67      0 1.000000e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 68      0 1.149757e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 69      0 1.321941e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 70      0 1.519911e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 71      0 1.747528e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 72      0 2.009233e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 73      0 2.310130e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 74      0 2.656088e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 75      0 3.053856e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 76      0 3.511192e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 77      0 4.037017e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 78      0 4.641589e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 79      0 5.336699e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 80      0 6.135907e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 81      0 7.054802e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 82      0 8.111308e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 83      0 9.326033e+01 0.6787290 0.000000000 0.004381972 0.00000000
## 84      0 1.072267e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 85      0 1.232847e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 86      0 1.417474e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 87      0 1.629751e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 88      0 1.873817e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 89      0 2.154435e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 90      0 2.477076e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 91      0 2.848036e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 92      0 3.274549e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 93      0 3.764936e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 94      0 4.328761e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 95      0 4.977024e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 96      0 5.722368e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 97      0 6.579332e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 98      0 7.564633e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 99      0 8.697490e+02 0.6787290 0.000000000 0.004381972 0.00000000
## 100     0 1.000000e+03 0.6787290 0.000000000 0.004381972 0.00000000
#Visualize accuracy across the lambda grid
plot(model_ridge)

#Obtain cross-validated accuracy metrics
confusionMatrix(model_ridge)
## Cross-Validated (10 fold) Confusion Matrix 
## 
## (entries are percentual average cell counts across resamples)
##  
##           Reference
## Prediction    1    2    3
##          1  2.4  1.9  0.0
##          2 23.7 66.0  6.0
##          3  0.0  0.0  0.0
##                             
##  Accuracy (average) : 0.6838
varImp(model_ridge)
## glmnet variable importance
## 
##   variables are sorted by maximum importance across the classes
##   only 20 most important variables shown (out of 59)
## 
##             1     2       3
## q896   75.799 24.16 100.000
## q417   94.994 81.42  13.534
## q176   67.033 22.58  89.655
## q177   70.844 87.18  16.296
## q416   63.843 79.99  16.099
## q496   47.662 69.25  21.547
## q175   51.841 69.04  17.161
## q532   67.189 43.12  24.025
## q215   43.362 60.12  16.718
## q216   21.397 57.52  36.079
## q495    1.352 54.49  53.095
## q522   51.776 26.95  24.788
## q493   28.934 42.40  13.423
## q895   35.566 16.38  19.139
## q873   27.023 34.93   7.864
## race42 29.914 32.12   2.165
## q475   28.050  7.13  20.877
## age7   27.885 18.80   9.038
## q213   27.690 26.32   1.326
## age4   26.849 18.99   7.813
plot(varImp(model_ridge))

var_import = varImp(model_ridge)$importance
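
Beyond the scaled importances, the raw multinomial coefficients at the selected penalty can be read off the underlying glmnet fit; for a multinomial model, coef() returns one sparse coefficient matrix per class. A sketch:

# per-class ridge coefficients at the cross-validated lambda
coef(model_ridge$finalModel, s = model_ridge$bestTune$lambda)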

Mississippi (MS)

df_ms = df_fit_state %>% 
  filter(sitename == "Mississippi (MS)") %>% 
  select(-sitename, -year)


set.seed(123)
#Create grid to search lambda
lambda <- 10^seq(-3, 3, length = 100)

# Specify training control
train_control_ridge <- trainControl(method = "cv", number = 10)

model_ridge = train(q85 ~ .,
                    df_ms, 
                    method = "glmnet", 
                    trControl = train_control_ridge, 
                    tuneGrid = expand.grid(alpha = 0, lambda = lambda))

summary(model_ridge)
##             Length Class      Mode     
## a0          300    -none-     numeric  
## beta          3    -none-     list     
## dfmat       300    -none-     numeric  
## df          100    -none-     numeric  
## dim           2    -none-     numeric  
## lambda      100    -none-     numeric  
## dev.ratio   100    -none-     numeric  
## nulldev       1    -none-     numeric  
## npasses       1    -none-     numeric  
## jerr          1    -none-     numeric  
## offset        1    -none-     logical  
## classnames    3    -none-     character
## grouped       1    -none-     logical  
## call          5    -none-     call     
## nobs          1    -none-     numeric  
## lambdaOpt     1    -none-     numeric  
## xNames       59    -none-     character
## problemType   1    -none-     character
## tuneValue     2    data.frame list     
## obsLevels     3    -none-     character
## param         0    -none-     list
model_ridge$bestTune
##    alpha     lambda
## 32     0 0.07564633
model_ridge$results
##     alpha       lambda  Accuracy        Kappa AccuracySD    KappaSD
## 1       0 1.000000e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 2       0 1.149757e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 3       0 1.321941e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 4       0 1.519911e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 5       0 1.747528e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 6       0 2.009233e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 7       0 2.310130e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 8       0 2.656088e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 9       0 3.053856e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 10      0 3.511192e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 11      0 4.037017e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 12      0 4.641589e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 13      0 5.336699e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 14      0 6.135907e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 15      0 7.054802e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 16      0 8.111308e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 17      0 9.326033e-03 0.7360908  0.192025669 0.08191557 0.23085814
## 18      0 1.072267e-02 0.7476292  0.218437985 0.08704218 0.25737306
## 19      0 1.232847e-02 0.7476292  0.218437985 0.08704218 0.25737306
## 20      0 1.417474e-02 0.7476292  0.205215010 0.08704218 0.25700389
## 21      0 1.629751e-02 0.7550468  0.209788985 0.08246444 0.24570791
## 22      0 1.873817e-02 0.7588930  0.214396073 0.07799583 0.24191647
## 23      0 2.154435e-02 0.7625967  0.218796788 0.07593610 0.23952629
## 24      0 2.477076e-02 0.7625967  0.218796788 0.07593610 0.23952629
## 25      0 2.848036e-02 0.7702890  0.226160726 0.07180868 0.23237820
## 26      0 3.274549e-02 0.7779813  0.241253901 0.07569752 0.23702819
## 27      0 3.764936e-02 0.7855311  0.250560621 0.06584177 0.23236552
## 28      0 4.328761e-02 0.7855311  0.243332375 0.06584177 0.23232003
## 29      0 4.977024e-02 0.7819597  0.200449665 0.07319058 0.27261404
## 30      0 5.722368e-02 0.7858059  0.205302825 0.07118472 0.26684909
## 31      0 6.579332e-02 0.7858059  0.196547820 0.07118472 0.25310784
## 32      0 7.564633e-02 0.7896520  0.202670269 0.07122869 0.24684269
## 33      0 8.697490e-02 0.7819597  0.152058663 0.07319058 0.27022182
## 34      0 1.000000e-01 0.7819597  0.152058663 0.07319058 0.27022182
## 35      0 1.149757e-01 0.7819597  0.145165768 0.07319058 0.26299110
## 36      0 1.321941e-01 0.7819597  0.126837564 0.06665108 0.22582616
## 37      0 1.519911e-01 0.7819597  0.126837564 0.06665108 0.22582616
## 38      0 1.747528e-01 0.7855311  0.128869495 0.06123214 0.22387202
## 39      0 2.009233e-01 0.7816850  0.105682171 0.05523070 0.18739011
## 40      0 2.310130e-01 0.7816850  0.105682171 0.05523070 0.18739011
## 41      0 2.656088e-01 0.7816850  0.105682171 0.05523070 0.18739011
## 42      0 3.053856e-01 0.7816850  0.105682171 0.05523070 0.18739011
## 43      0 3.511192e-01 0.7779813  0.068018279 0.05108913 0.15282746
## 44      0 4.037017e-01 0.7818274  0.054296756 0.04177133 0.13788769
## 45      0 4.641589e-01 0.7816952  0.040376150 0.04290781 0.13572323
## 46      0 5.336699e-01 0.7855413  0.046498599 0.04338751 0.13195953
## 47      0 6.135907e-01 0.7855413  0.046498599 0.04338751 0.13195953
## 48      0 7.054802e-01 0.7892450  0.052380952 0.03668909 0.12799444
## 49      0 8.111308e-01 0.7815527 -0.006122449 0.02522447 0.01936088
## 50      0 9.326033e-01 0.7815527 -0.006122449 0.02522447 0.01936088
## 51      0 1.072267e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 52      0 1.232847e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 53      0 1.417474e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 54      0 1.629751e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 55      0 1.873817e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 56      0 2.154435e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 57      0 2.477076e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 58      0 2.848036e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 59      0 3.274549e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 60      0 3.764936e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 61      0 4.328761e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 62      0 4.977024e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 63      0 5.722368e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 64      0 6.579332e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 65      0 7.564633e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 66      0 8.697490e+00 0.7853989  0.000000000 0.02605546 0.00000000
## 67      0 1.000000e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 68      0 1.149757e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 69      0 1.321941e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 70      0 1.519911e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 71      0 1.747528e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 72      0 2.009233e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 73      0 2.310130e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 74      0 2.656088e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 75      0 3.053856e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 76      0 3.511192e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 77      0 4.037017e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 78      0 4.641589e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 79      0 5.336699e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 80      0 6.135907e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 81      0 7.054802e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 82      0 8.111308e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 83      0 9.326033e+01 0.7853989  0.000000000 0.02605546 0.00000000
## 84      0 1.072267e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 85      0 1.232847e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 86      0 1.417474e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 87      0 1.629751e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 88      0 1.873817e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 89      0 2.154435e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 90      0 2.477076e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 91      0 2.848036e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 92      0 3.274549e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 93      0 3.764936e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 94      0 4.328761e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 95      0 4.977024e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 96      0 5.722368e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 97      0 6.579332e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 98      0 7.564633e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 99      0 8.697490e+02 0.7853989  0.000000000 0.02605546 0.00000000
## 100     0 1.000000e+03 0.7853989  0.000000000 0.02605546 0.00000000
#Visualize accuracy across the lambda grid
plot(model_ridge)

#Obtain cross-validated accuracy metrics
confusionMatrix(model_ridge)
## Cross-Validated (10 fold) Confusion Matrix 
## 
## (entries are percentual average cell counts across resamples)
##  
##           Reference
## Prediction    1    2    3
##          1  3.4  2.6  0.4
##          2 12.1 75.1  4.9
##          3  0.4  0.8  0.4
##                             
##  Accuracy (average) : 0.7887
varImp(model_ridge)
## glmnet variable importance
## 
##   variables are sorted by maximum importance across the classes
##   only 20 most important variables shown (out of 59)
## 
##               1      2      3
## q495      57.95 100.00 42.008
## q215      63.89  94.54 30.611
## q894      72.19  88.20 15.978
## q216      86.85  47.80 39.008
## q415      41.98  35.45 77.472
## q178      43.69  76.14 32.407
## q475      59.84  70.18 10.303
## q896      45.54  69.33 23.755
## q476      69.19  63.93  5.221
## q522      67.35  24.71 42.595
## q177      39.04  27.33 66.408
## q416      63.06  48.18 14.843
## q174      62.48  28.02 34.420
## q496      60.37  27.61 32.715
## q873      34.10  60.37 26.230
## q175      25.90  27.36 53.307
## q176      34.06  45.49 11.390
## q414      43.43  16.95 26.441
## q895      40.33  10.30 29.984
## qnothhpl2 38.66  14.60 24.017
plot(varImp(model_ridge))

var_import = varImp(model_ridge)$importance
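
The averaged confusion matrices make it clear that most of the signal sits in class 2; per-class recall can be computed directly from the stored table. A sketch, assuming the confusionMatrix.train object keeps the averaged percentages in its table element (as reflected in the printouts above):

# per-class recall (diagonal over column totals) from the CV confusion matrix
cv_tab <- confusionMatrix(model_ridge)$table
diag(cv_tab) / colSums(cv_tab)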

Nebraska (NE)

df_ne = df_fit_state %>% 
  filter(sitename == "Nebraska (NE)") %>% 
  select(-sitename, -year)


set.seed(123)
#Create grid to search lambda
lambda <- 10^seq(-3, 3, length = 100)

# Specify training control
train_control_ridge <- trainControl(method = "cv", number = 10)

model_ridge = train(q85 ~ .,
                    df_ne, 
                    method = "glmnet", 
                    trControl = train_control_ridge, 
                    tuneGrid = expand.grid(alpha = 0, lambda = lambda))
## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground
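
The repeated lognet warnings flag that at least one outcome class is nearly empty inside some CV folds, so it is worth checking the raw class frequencies before trusting the Nebraska fit; a quick check:

# how many Nebraska respondents fall in each q85 category?
df_ne %>% count(q85)
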
summary(model_ridge)
##             Length Class      Mode     
## a0          300    -none-     numeric  
## beta          3    -none-     list     
## dfmat       300    -none-     numeric  
## df          100    -none-     numeric  
## dim           2    -none-     numeric  
## lambda      100    -none-     numeric  
## dev.ratio   100    -none-     numeric  
## nulldev       1    -none-     numeric  
## npasses       1    -none-     numeric  
## jerr          1    -none-     numeric  
## offset        1    -none-     logical  
## classnames    3    -none-     character
## grouped       1    -none-     logical  
## call          5    -none-     call     
## nobs          1    -none-     numeric  
## lambdaOpt     1    -none-     numeric  
## xNames       59    -none-     character
## problemType   1    -none-     character
## tuneValue     2    data.frame list     
## obsLevels     3    -none-     character
## param         0    -none-     list
model_ridge$bestTune
##    alpha    lambda
## 46     0 0.5336699
model_ridge$results
##     alpha       lambda  Accuracy      Kappa AccuracySD    KappaSD
## 1       0 1.000000e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 2       0 1.149757e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 3       0 1.321941e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 4       0 1.519911e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 5       0 1.747528e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 6       0 2.009233e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 7       0 2.310130e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 8       0 2.656088e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 9       0 3.053856e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 10      0 3.511192e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 11      0 4.037017e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 12      0 4.641589e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 13      0 5.336699e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 14      0 6.135907e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 15      0 7.054802e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 16      0 8.111308e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 17      0 9.326033e-03 0.7460652 0.15755548 0.04981844 0.13782059
## 18      0 1.072267e-02 0.7510652 0.18314132 0.04079812 0.09340409
## 19      0 1.232847e-02 0.7510652 0.18314132 0.04079812 0.09340409
## 20      0 1.417474e-02 0.7510652 0.18314132 0.04079812 0.09340409
## 21      0 1.629751e-02 0.7510652 0.16707366 0.03330061 0.04683574
## 22      0 1.873817e-02 0.7560652 0.17564422 0.03670280 0.05524081
## 23      0 2.154435e-02 0.7513033 0.15006730 0.04489948 0.11167736
## 24      0 2.477076e-02 0.7513033 0.14875035 0.04489948 0.11217868
## 25      0 2.848036e-02 0.7560652 0.15946464 0.05346786 0.12705401
## 26      0 3.274549e-02 0.7608271 0.16392087 0.04625530 0.11604823
## 27      0 3.764936e-02 0.7658271 0.17248279 0.04763667 0.11882449
## 28      0 4.328761e-02 0.7658271 0.17248279 0.04763667 0.11882449
## 29      0 4.977024e-02 0.7705890 0.17610348 0.04416558 0.11031293
## 30      0 5.722368e-02 0.7755890 0.18301109 0.03763345 0.10745519
## 31      0 6.579332e-02 0.7855890 0.20113958 0.04298410 0.12021667
## 32      0 7.564633e-02 0.7855890 0.20113958 0.04298410 0.12021667
## 33      0 8.697490e-02 0.7905890 0.21209515 0.04751584 0.12933289
## 34      0 1.000000e-01 0.7955890 0.21987068 0.04535159 0.12668901
## 35      0 1.149757e-01 0.8008521 0.20297772 0.04057927 0.15575369
## 36      0 1.321941e-01 0.8058521 0.21253567 0.04344202 0.16238278
## 37      0 1.519911e-01 0.8108521 0.22349124 0.04552126 0.16842111
## 38      0 1.747528e-01 0.8108521 0.19656928 0.03894393 0.17548037
## 39      0 2.009233e-01 0.8108521 0.19656928 0.03894393 0.17548037
## 40      0 2.310130e-01 0.8163784 0.18596636 0.04534937 0.19926712
## 41      0 2.656088e-01 0.8163784 0.18596636 0.04534937 0.19926712
## 42      0 3.053856e-01 0.8211404 0.19096636 0.04131442 0.19322406
## 43      0 3.511192e-01 0.8211404 0.16573204 0.04131442 0.19856117
## 44      0 4.037017e-01 0.8261404 0.17985981 0.04772362 0.21496195
## 45      0 4.641589e-01 0.8311404 0.18738669 0.04100659 0.20617419
## 46      0 5.336699e-01 0.8311404 0.18738669 0.04100659 0.20617419
## 47      0 6.135907e-01 0.8263784 0.11668203 0.03282657 0.18992796
## 48      0 7.054802e-01 0.8263784 0.11668203 0.03282657 0.18992796
## 49      0 8.111308e-01 0.8263784 0.11668203 0.03282657 0.18992796
## 50      0 9.326033e-01 0.8263784 0.11668203 0.03282657 0.18992796
## 51      0 1.072267e+00 0.8211153 0.07096774 0.02356010 0.14961314
## 52      0 1.232847e+00 0.8211153 0.07096774 0.02356010 0.14961314
## 53      0 1.417474e+00 0.8211153 0.07096774 0.02356010 0.14961314
## 54      0 1.629751e+00 0.8161153 0.03548387 0.02200312 0.11220985
## 55      0 1.873817e+00 0.8161153 0.03548387 0.02200312 0.11220985
## 56      0 2.154435e+00 0.8111153 0.00000000 0.01891138 0.00000000
## 57      0 2.477076e+00 0.8111153 0.00000000 0.01891138 0.00000000
## 58      0 2.848036e+00 0.8111153 0.00000000 0.01891138 0.00000000
## 59      0 3.274549e+00 0.8111153 0.00000000 0.01891138 0.00000000
## 60      0 3.764936e+00 0.8111153 0.00000000 0.01891138 0.00000000
## 61      0 4.328761e+00 0.8111153 0.00000000 0.01891138 0.00000000
## 62      0 4.977024e+00 0.8111153 0.00000000 0.01891138 0.00000000
## 63      0 5.722368e+00 0.8111153 0.00000000 0.01891138 0.00000000
## 64      0 6.579332e+00 0.8111153 0.00000000 0.01891138 0.00000000
## 65      0 7.564633e+00 0.8111153 0.00000000 0.01891138 0.00000000
## 66      0 8.697490e+00 0.8111153 0.00000000 0.01891138 0.00000000
## 67      0 1.000000e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 68      0 1.149757e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 69      0 1.321941e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 70      0 1.519911e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 71      0 1.747528e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 72      0 2.009233e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 73      0 2.310130e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 74      0 2.656088e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 75      0 3.053856e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 76      0 3.511192e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 77      0 4.037017e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 78      0 4.641589e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 79      0 5.336699e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 80      0 6.135907e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 81      0 7.054802e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 82      0 8.111308e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 83      0 9.326033e+01 0.8111153 0.00000000 0.01891138 0.00000000
## 84      0 1.072267e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 85      0 1.232847e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 86      0 1.417474e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 87      0 1.629751e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 88      0 1.873817e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 89      0 2.154435e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 90      0 2.477076e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 91      0 2.848036e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 92      0 3.274549e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 93      0 3.764936e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 94      0 4.328761e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 95      0 4.977024e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 96      0 5.722368e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 97      0 6.579332e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 98      0 7.564633e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 99      0 8.697490e+02 0.8111153 0.00000000 0.01891138 0.00000000
## 100     0 1.000000e+03 0.8111153 0.00000000 0.01891138 0.00000000
#Visualize accuracy across the lambda grid
plot(model_ridge)

#Obtain cross-validated accuracy metrics
confusionMatrix(model_ridge)
## Cross-Validated (10 fold) Confusion Matrix 
## 
## (entries are percentual average cell counts across resamples)
##  
##           Reference
## Prediction    1    2    3
##          1  2.5  0.5  0.0
##          2 12.4 80.6  4.0
##          3  0.0  0.0  0.0
##                             
##  Accuracy (average) : 0.8308
varImp(model_ridge)
## glmnet variable importance
## 
##   variables are sorted by maximum importance across the classes
##   only 20 most important variables shown (out of 59)
## 
##             1      2      3
## q178   18.778 100.00 81.222
## q416   94.318  85.21  9.105
## q873   17.475  79.09 61.615
## q475   78.551  66.80 11.746
## q897   21.185  55.47 76.655
## q175   59.375  52.82  6.558
## q214   56.699  45.54 11.155
## q417   35.765  19.62 55.383
## q654    3.969  47.54 43.573
## q176   32.883  42.51  9.628
## q496   41.558  28.76 12.797
## q522   39.895  24.12 15.776
## q473   27.187  34.59  7.407
## q894   33.468  22.92 10.551
## q495   32.225  22.63  9.595
## race42 21.338  31.66 10.320
## q494   31.530  21.43 10.100
## q532   30.758  17.14 13.618
## grade2 29.046  20.91  8.139
## q895   20.773  27.23  6.461
plot(varImp(model_ridge))

var_import = varImp(model_ridge)$importance

Oklahoma (OK)

df_ok = df_fit_state %>% 
  filter(sitename == "Oklahoma (OK)") %>% 
  select(-sitename, -year)


set.seed(123)
#Create grid to search lambda
lambda <- 10^seq(-3, 3, length = 100)

# Specify training control
train_control_ridge <- trainControl(method = "cv", number = 10)

model_ridge = train(q85 ~ .,
                    df_ok, 
                    method = "glmnet", 
                    trControl = train_control_ridge, 
                    tuneGrid = expand.grid(alpha = 0, lambda = lambda))

summary(model_ridge)
##             Length Class      Mode     
## a0          300    -none-     numeric  
## beta          3    -none-     list     
## dfmat       300    -none-     numeric  
## df          100    -none-     numeric  
## dim           2    -none-     numeric  
## lambda      100    -none-     numeric  
## dev.ratio   100    -none-     numeric  
## nulldev       1    -none-     numeric  
## npasses       1    -none-     numeric  
## jerr          1    -none-     numeric  
## offset        1    -none-     logical  
## classnames    3    -none-     character
## grouped       1    -none-     logical  
## call          5    -none-     call     
## nobs          1    -none-     numeric  
## lambdaOpt     1    -none-     numeric  
## xNames       59    -none-     character
## problemType   1    -none-     character
## tuneValue     2    data.frame list     
## obsLevels     3    -none-     character
## param         0    -none-     list
model_ridge$bestTune
##    alpha   lambda
## 52     0 1.232847
model_ridge$results
##     alpha       lambda  Accuracy         Kappa AccuracySD    KappaSD
## 1       0 1.000000e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 2       0 1.149757e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 3       0 1.321941e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 4       0 1.519911e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 5       0 1.747528e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 6       0 2.009233e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 7       0 2.310130e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 8       0 2.656088e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 9       0 3.053856e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 10      0 3.511192e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 11      0 4.037017e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 12      0 4.641589e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 13      0 5.336699e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 14      0 6.135907e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 15      0 7.054802e-03 0.6753595  0.0051490949 0.05858083 0.13772629
## 16      0 8.111308e-03 0.6782166  0.0079268727 0.05828966 0.13789093
## 17      0 9.326033e-03 0.6811578  0.0102078018 0.05557257 0.13733155
## 18      0 1.072267e-02 0.6840149  0.0047214503 0.05655858 0.13472993
## 19      0 1.232847e-02 0.6897339  0.0117474892 0.05664585 0.13678092
## 20      0 1.417474e-02 0.6897339  0.0117474892 0.05664585 0.13678092
## 21      0 1.629751e-02 0.6925117  0.0156252740 0.05606712 0.13674027
## 22      0 1.873817e-02 0.6867927 -0.0074197256 0.05978444 0.14637143
## 23      0 2.154435e-02 0.6838515 -0.0228975459 0.05782458 0.13809490
## 24      0 2.477076e-02 0.6808310 -0.0503384549 0.05085364 0.09755483
## 25      0 2.848036e-02 0.6808310 -0.0503384549 0.05085364 0.09755483
## 26      0 3.274549e-02 0.6896545 -0.0409325143 0.04610573 0.09740535
## 27      0 3.764936e-02 0.6896545 -0.0409325143 0.04610573 0.09740535
## 28      0 4.328761e-02 0.7040289 -0.0226249283 0.04219046 0.10027612
## 29      0 4.977024e-02 0.7069701 -0.0129689725 0.04550701 0.11023699
## 30      0 5.722368e-02 0.7127684 -0.0047981870 0.04355317 0.10655151
## 31      0 6.579332e-02 0.7215079  0.0198887772 0.05286144 0.14793177
## 32      0 7.564633e-02 0.7243651  0.0233560792 0.04967190 0.14718306
## 33      0 8.697490e-02 0.7273063  0.0255581870 0.04733674 0.14465025
## 34      0 1.000000e-01 0.7303268  0.0192264917 0.04660468 0.14003503
## 35      0 1.149757e-01 0.7303268  0.0125088943 0.04660468 0.14163689
## 36      0 1.321941e-01 0.7359617  0.0102533900 0.04006671 0.09840697
## 37      0 1.519911e-01 0.7359617  0.0102533900 0.04006671 0.09840697
## 38      0 1.747528e-01 0.7358777 -0.0051417922 0.03306082 0.07898265
## 39      0 2.009233e-01 0.7387348 -0.0008246807 0.03221116 0.08272210
## 40      0 2.310130e-01 0.7416760  0.0033551577 0.03015529 0.07868755
## 41      0 2.656088e-01 0.7446172  0.0081496782 0.03088908 0.07664871
## 42      0 3.053856e-01 0.7474743  0.0124244149 0.02572511 0.07154862
## 43      0 3.511192e-01 0.7531886  0.0228658007 0.03049067 0.08840404
## 44      0 4.037017e-01 0.7531886  0.0228658007 0.03049067 0.08840404
## 45      0 4.641589e-01 0.7531886  0.0228658007 0.03049067 0.08840404
## 46      0 5.336699e-01 0.7531886  0.0228658007 0.03049067 0.08840404
## 47      0 6.135907e-01 0.7560458  0.0276562199 0.02764618 0.08538959
## 48      0 7.054802e-01 0.7560458  0.0276562199 0.02764618 0.08538959
## 49      0 8.111308e-01 0.7560458  0.0276562199 0.02764618 0.08538959
## 50      0 9.326033e-01 0.7560458  0.0138419879 0.02003745 0.06186633
## 51      0 1.072267e+00 0.7588235  0.0184931507 0.01642780 0.05848048
## 52      0 1.232847e+00 0.7588235  0.0184931507 0.01642780 0.05848048
## 53      0 1.417474e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 54      0 1.629751e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 55      0 1.873817e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 56      0 2.154435e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 57      0 2.477076e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 58      0 2.848036e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 59      0 3.274549e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 60      0 3.764936e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 61      0 4.328761e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 62      0 4.977024e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 63      0 5.722368e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 64      0 6.579332e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 65      0 7.564633e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 66      0 8.697490e+00 0.7558824  0.0000000000 0.01121148 0.00000000
## 67      0 1.000000e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 68      0 1.149757e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 69      0 1.321941e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 70      0 1.519911e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 71      0 1.747528e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 72      0 2.009233e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 73      0 2.310130e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 74      0 2.656088e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 75      0 3.053856e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 76      0 3.511192e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 77      0 4.037017e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 78      0 4.641589e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 79      0 5.336699e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 80      0 6.135907e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 81      0 7.054802e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 82      0 8.111308e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 83      0 9.326033e+01 0.7558824  0.0000000000 0.01121148 0.00000000
## 84      0 1.072267e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 85      0 1.232847e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 86      0 1.417474e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 87      0 1.629751e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 88      0 1.873817e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 89      0 2.154435e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 90      0 2.477076e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 91      0 2.848036e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 92      0 3.274549e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 93      0 3.764936e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 94      0 4.328761e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 95      0 4.977024e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 96      0 5.722368e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 97      0 6.579332e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 98      0 7.564633e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 99      0 8.697490e+02 0.7558824  0.0000000000 0.01121148 0.00000000
## 100     0 1.000000e+03 0.7558824  0.0000000000 0.01121148 0.00000000
#Visualize accuracy across the lambda grid
plot(model_ridge)

#Obtain cross-validated accuracy metrics
confusionMatrix(model_ridge)
## Cross-Validated (10 fold) Confusion Matrix 
## 
## (entries are percentual average cell counts across resamples)
##  
##           Reference
## Prediction    1    2    3
##          1  0.3  0.0  0.0
##          2 18.7 75.6  5.5
##          3  0.0  0.0  0.0
##                             
##  Accuracy (average) : 0.7586
varImp(model_ridge)
## glmnet variable importance
## 
##   variables are sorted by maximum importance across the classes
##   only 20 most important variables shown (out of 59)
## 
##              1     2      3
## q176   100.000 81.70 18.299
## q178    34.919 64.25 29.328
## q894     7.407 58.23 50.823
## q177    53.650 40.55 13.100
## q175    41.326 53.24 11.917
## q496    18.120 44.26 26.137
## age3    43.940 30.99 12.948
## q474    40.598 42.98  2.378
## q896    32.417 42.62 10.202
## q502    35.601 38.37  2.773
## q213    24.917 36.88 11.964
## q214    22.794 35.46 12.663
## q897    35.383 22.51 12.875
## q495    21.002 32.05 11.049
## race42  19.561 31.41 11.854
## q895    15.616 30.51 14.891
## q416    27.456 14.34 13.119
## q415     8.562 26.65 18.085
## q475    10.114 16.08 26.195
## q476    25.706 21.98  3.728
plot(varImp(model_ridge))

var_import = varImp(model_ridge)$importance
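
The default plot() method works, but since $results is a plain data frame the tuning profile can also be drawn with ggplot2 on a log scale, which makes the flat high-lambda plateau easier to read. A sketch:

# accuracy profile over the lambda grid, log-scaled x axis
model_ridge$results %>% 
  ggplot(aes(x = lambda, y = Accuracy)) +
  geom_line() +
  scale_x_log10() +
  labs(x = "lambda (log scale)", y = "CV accuracy")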

Pennsylvania (PA)

df_pa = df_fit_state %>% 
  filter(sitename == "Pennsylvania (PA)") %>% 
  select(-sitename, -year)


set.seed(123)
#Create grid to search lambda
lambda <- 10^seq(-3, 3, length = 100)

# Specify training control
train_control_ridge <- trainControl(method = "cv", number = 10)

model_ridge = train(q85 ~ .,
                    df_pa, 
                    method = "glmnet", 
                    trControl = train_control_ridge, 
                    tuneGrid = expand.grid(alpha = 0, lambda = lambda))

summary(model_ridge)
##             Length Class      Mode     
## a0          300    -none-     numeric  
## beta          3    -none-     list     
## dfmat       300    -none-     numeric  
## df          100    -none-     numeric  
## dim           2    -none-     numeric  
## lambda      100    -none-     numeric  
## dev.ratio   100    -none-     numeric  
## nulldev       1    -none-     numeric  
## npasses       1    -none-     numeric  
## jerr          1    -none-     numeric  
## offset        1    -none-     logical  
## classnames    3    -none-     character
## grouped       1    -none-     logical  
## call          5    -none-     call     
## nobs          1    -none-     numeric  
## lambdaOpt     1    -none-     numeric  
## xNames       59    -none-     character
## problemType   1    -none-     character
## tuneValue     2    data.frame list     
## obsLevels     3    -none-     character
## param         0    -none-     list
model_ridge$bestTune
##    alpha    lambda
## 43     0 0.3511192
model_ridge$results
##     alpha       lambda  Accuracy        Kappa  AccuracySD     KappaSD
## 1       0 1.000000e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 2       0 1.149757e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 3       0 1.321941e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 4       0 1.519911e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 5       0 1.747528e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 6       0 2.009233e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 7       0 2.310130e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 8       0 2.656088e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 9       0 3.053856e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 10      0 3.511192e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 11      0 4.037017e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 12      0 4.641589e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 13      0 5.336699e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 14      0 6.135907e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 15      0 7.054802e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 16      0 8.111308e-03 0.7273174  0.200498330 0.029509772 0.064108316
## 17      0 9.326033e-03 0.7249918  0.190364787 0.028943622 0.064193483
## 18      0 1.072267e-02 0.7226662  0.179391158 0.030212686 0.080304788
## 19      0 1.232847e-02 0.7272140  0.186066113 0.030468161 0.077783609
## 20      0 1.417474e-02 0.7295396  0.189529482 0.028805082 0.081701757
## 21      0 1.629751e-02 0.7295396  0.189529482 0.028805082 0.081701757
## 22      0 1.873817e-02 0.7295396  0.189529482 0.028805082 0.081701757
## 23      0 2.154435e-02 0.7341379  0.197281249 0.029458141 0.087422932
## 24      0 2.477076e-02 0.7364106  0.195201672 0.033381594 0.085690551
## 25      0 2.848036e-02 0.7409561  0.202496327 0.031608101 0.085401569
## 26      0 3.274549e-02 0.7409561  0.202496327 0.031608101 0.085401569
## 27      0 3.764936e-02 0.7409561  0.202496327 0.031608101 0.085401569
## 28      0 4.328761e-02 0.7364611  0.182317434 0.034650831 0.098781661
## 29      0 4.977024e-02 0.7342412  0.163387376 0.033956769 0.098622529
## 30      0 5.722368e-02 0.7388396  0.171248698 0.035542267 0.102947425
## 31      0 6.579332e-02 0.7388396  0.171248698 0.035542267 0.102947425
## 32      0 7.564633e-02 0.7388396  0.171248698 0.035542267 0.102947425
## 33      0 8.697490e-02 0.7411651  0.175593516 0.038277298 0.111183913
## 34      0 1.000000e-01 0.7456601  0.179280571 0.035023503 0.113348306
## 35      0 1.149757e-01 0.7433874  0.166987351 0.035445450 0.124865785
## 36      0 1.321941e-01 0.7433369  0.160678941 0.034290710 0.120137161
## 37      0 1.519911e-01 0.7388419  0.139922449 0.034155947 0.115446225
## 38      0 1.747528e-01 0.7388419  0.139922449 0.034155947 0.115446225
## 39      0 2.009233e-01 0.7456601  0.145645688 0.027703952 0.115774941
## 40      0 2.310130e-01 0.7525311  0.142758416 0.032430457 0.139744868
## 41      0 2.656088e-01 0.7525311  0.142758416 0.032430457 0.139744868
## 42      0 3.053856e-01 0.7525311  0.137164234 0.032430457 0.127812379
## 43      0 3.511192e-01 0.7525311  0.132890730 0.032430457 0.127907036
## 44      0 4.037017e-01 0.7501550  0.106760280 0.028695155 0.103536612
## 45      0 4.641589e-01 0.7433369  0.061427059 0.028835745 0.109302794
## 46      0 5.336699e-01 0.7387385  0.036901470 0.020433101 0.062587116
## 47      0 6.135907e-01 0.7410641  0.030662504 0.022355030 0.074880111
## 48      0 7.054802e-01 0.7387914  0.017106512 0.019785990 0.065457663
## 49      0 8.111308e-01 0.7364658  0.003625627 0.017248313 0.050754091
## 50      0 9.326033e-01 0.7341402 -0.009855258 0.013838522 0.021526634
## 51      0 1.072267e+00 0.7341402 -0.009855258 0.013838522 0.021526634
## 52      0 1.232847e+00 0.7364130 -0.006122449 0.014438600 0.019360884
## 53      0 1.417474e+00 0.7386857 -0.002723735 0.009958867 0.008613208
## 54      0 1.629751e+00 0.7386857 -0.002723735 0.009958867 0.008613208
## 55      0 1.873817e+00 0.7386857 -0.002723735 0.009958867 0.008613208
## 56      0 2.154435e+00 0.7386857 -0.002723735 0.009958867 0.008613208
## 57      0 2.477076e+00 0.7386857 -0.002723735 0.009958867 0.008613208
## 58      0 2.848036e+00 0.7386857 -0.002723735 0.009958867 0.008613208
## 59      0 3.274549e+00 0.7409584  0.000000000 0.009653532 0.000000000
## 60      0 3.764936e+00 0.7409584  0.000000000 0.009653532 0.000000000
## 61      0 4.328761e+00 0.7409584  0.000000000 0.009653532 0.000000000
## 62      0 4.977024e+00 0.7409584  0.000000000 0.009653532 0.000000000
## 63      0 5.722368e+00 0.7409584  0.000000000 0.009653532 0.000000000
## 64      0 6.579332e+00 0.7409584  0.000000000 0.009653532 0.000000000
## 65      0 7.564633e+00 0.7409584  0.000000000 0.009653532 0.000000000
## 66      0 8.697490e+00 0.7409584  0.000000000 0.009653532 0.000000000
## 67      0 1.000000e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 68      0 1.149757e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 69      0 1.321941e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 70      0 1.519911e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 71      0 1.747528e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 72      0 2.009233e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 73      0 2.310130e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 74      0 2.656088e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 75      0 3.053856e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 76      0 3.511192e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 77      0 4.037017e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 78      0 4.641589e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 79      0 5.336699e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 80      0 6.135907e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 81      0 7.054802e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 82      0 8.111308e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 83      0 9.326033e+01 0.7409584  0.000000000 0.009653532 0.000000000
## 84      0 1.072267e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 85      0 1.232847e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 86      0 1.417474e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 87      0 1.629751e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 88      0 1.873817e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 89      0 2.154435e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 90      0 2.477076e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 91      0 2.848036e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 92      0 3.274549e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 93      0 3.764936e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 94      0 4.328761e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 95      0 4.977024e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 96      0 5.722368e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 97      0 6.579332e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 98      0 7.564633e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 99      0 8.697490e+02 0.7409584  0.000000000 0.009653532 0.000000000
## 100     0 1.000000e+03 0.7409584  0.000000000 0.009653532 0.000000000
# Visualize cross-validated accuracy across the lambda grid
plot(model_ridge)

# Obtain cross-validated accuracy metrics from training
confusionMatrix(model_ridge)
## Cross-Validated (10 fold) Confusion Matrix 
## 
## (entries are percentual average cell counts across resamples)
##  
##           Reference
## Prediction    1    2    3
##          1  2.7  1.4  0.2
##          2 15.9 72.5  6.8
##          3  0.2  0.2  0.0
##                             
##  Accuracy (average) : 0.7523
varImp(model_ridge)
## glmnet variable importance
## 
##   variables are sorted by maximum importance across the classes
##   only 20 most important variables shown (out of 59)
## 
##             1       2      3
## q177   100.00 81.6859 18.314
## q176    55.77 72.7734 17.004
## q895    53.65 70.9263 17.276
## q495    41.97 61.9460 19.979
## q216    58.17  2.4824 60.649
## q502     5.57 55.7854 50.216
## q175    44.39 55.6076 11.215
## q215    40.68 48.3019  7.625
## q894    47.06 43.5931  3.465
## race42  16.10 39.4043 23.302
## q415    30.88 37.8966  7.020
## q532    15.55 37.3217 21.774
## race43  36.68 25.0667 11.610
## q214    35.77 15.7085 20.065
## q413    35.64 32.7126  2.930
## q493    30.85  0.8222 31.676
## q174    30.47  0.7405 31.208
## age7    28.99 22.4740  6.518
## q414     7.75 27.7310 19.981
## q873    12.77 11.9715 24.746
plot(varImp(model_ridge))

var_importance <- varImp(model_ridge)

# variables with importance above 50 for outcome class 1
var_importance$importance %>% 
  janitor::clean_names() %>% 
  filter(x1 > 50) %>% 
  select(x1) %>% 
  arrange(desc(x1))
##             x1
## q177 100.00000
## q216  58.16652
## q176  55.76975
## q895  53.65049
# variables with importance above 50 for outcome class 2
var_importance$importance %>% 
  janitor::clean_names() %>% 
  filter(x2 > 50) %>% 
  select(x2) %>% 
  arrange(desc(x2))
##            x2
## q177 81.68592
## q176 72.77340
## q895 70.92627
## q495 61.94604
## q502 55.78542
## q175 55.60762
# variables with importance above 50 for outcome class 3
var_importance$importance %>% 
  janitor::clean_names() %>% 
  filter(x3 > 50) %>% 
  select(x3) %>% 
  arrange(desc(x3))
##            x3
## q216 60.64893
## q502 50.21553
var_import <- varImp(model_ridge)$importance
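
The three filter-and-arrange chunks above repeat the same pattern once per outcome class. As a small convenience, the pattern could be wrapped in a helper; this is a sketch assuming the cleaned column names x1, x2, x3 as above, and top_vars is not part of the original pipeline:

# hedged sketch: list variables whose importance for one class exceeds a cutoff
top_vars <- function(importance_df, class_col, cutoff = 50) {
  importance_df %>% 
    janitor::clean_names() %>% 
    filter(.data[[class_col]] > cutoff) %>% 
    select(all_of(class_col)) %>% 
    arrange(desc(.data[[class_col]]))
}

# usage, e.g. top_vars(var_importance$importance, "x1")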

South Carolina (SC)

df_sc <- df_fit_state %>% 
  filter(sitename == "South Carolina (SC)") %>% 
  select(-sitename, -year)

set.seed(123)
# Create a grid of candidate lambda values
lambda <- 10^seq(-3, 3, length = 100)

# Specify 10-fold cross-validation as the training control
train_control_ridge <- trainControl(method = "cv", number = 10)

# Fit a ridge-penalized multinomial model (alpha = 0) over the lambda grid
model_ridge <- train(q85 ~ .,
                     data = df_sc, 
                     method = "glmnet", 
                     trControl = train_control_ridge, 
                     tuneGrid = expand.grid(alpha = 0, lambda = lambda))
## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground

## Warning in lognet(xd, is.sparse, ix, jx, y, weights, offset, alpha, nobs, : one
## multinomial or binomial class has fewer than 8 observations; dangerous ground
summary(model_ridge)
##             Length Class      Mode     
## a0          300    -none-     numeric  
## beta          3    -none-     list     
## dfmat       300    -none-     numeric  
## df          100    -none-     numeric  
## dim           2    -none-     numeric  
## lambda      100    -none-     numeric  
## dev.ratio   100    -none-     numeric  
## nulldev       1    -none-     numeric  
## npasses       1    -none-     numeric  
## jerr          1    -none-     numeric  
## offset        1    -none-     logical  
## classnames    3    -none-     character
## grouped       1    -none-     logical  
## call          5    -none-     call     
## nobs          1    -none-     numeric  
## lambdaOpt     1    -none-     numeric  
## xNames       59    -none-     character
## problemType   1    -none-     character
## tuneValue     2    data.frame list     
## obsLevels     3    -none-     character
## param         0    -none-     list
model_ridge$bestTune
##     alpha lambda
## 100     0   1000
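
The tuned lambda lands on the upper boundary of the search grid, which means cross-validated accuracy never beat the maximally shrunken, essentially intercept-only fit; the confusion matrix below confirms that only class 2 is predicted. If an interior optimum were wanted, one option (a sketch reusing the training control above; lambda_wide and model_ridge_wide are illustrative names) is to widen the grid and refit:

# hedged sketch: widen the lambda grid beyond 10^3 and refit
lambda_wide <- 10^seq(-3, 5, length = 100)

model_ridge_wide <- train(q85 ~ .,
                          data = df_sc,
                          method = "glmnet",
                          trControl = train_control_ridge,
                          tuneGrid = expand.grid(alpha = 0, lambda = lambda_wide))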
model_ridge$results
##     alpha       lambda  Accuracy        Kappa AccuracySD    KappaSD
## 1       0 1.000000e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 2       0 1.149757e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 3       0 1.321941e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 4       0 1.519911e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 5       0 1.747528e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 6       0 2.009233e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 7       0 2.310130e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 8       0 2.656088e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 9       0 3.053856e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 10      0 3.511192e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 11      0 4.037017e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 12      0 4.641589e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 13      0 5.336699e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 14      0 6.135907e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 15      0 7.054802e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 16      0 8.111308e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 17      0 9.326033e-03 0.6366300 -0.051363912 0.09988432 0.11190004
## 18      0 1.072267e-02 0.6366300 -0.051363912 0.09988432 0.11190004
## 19      0 1.232847e-02 0.6366300 -0.051363912 0.09988432 0.11190004
## 20      0 1.417474e-02 0.6366300 -0.051363912 0.09988432 0.11190004
## 21      0 1.629751e-02 0.6443223 -0.028000625 0.09428944 0.13885428
## 22      0 1.873817e-02 0.6282967 -0.076572054 0.07558931 0.11746689
## 23      0 2.154435e-02 0.6359890 -0.066797618 0.06906385 0.12587883
## 24      0 2.477076e-02 0.6513736 -0.050517247 0.07153552 0.15053220
## 25      0 2.848036e-02 0.6513736 -0.050517247 0.07153552 0.15053220
## 26      0 3.274549e-02 0.6590659 -0.038910104 0.07137122 0.16478055
## 27      0 3.764936e-02 0.6513736 -0.063346195 0.07153552 0.11774043
## 28      0 4.328761e-02 0.6673993 -0.045719900 0.06622567 0.13183439
## 29      0 4.977024e-02 0.6757326 -0.042358555 0.05935914 0.12851704
## 30      0 5.722368e-02 0.6834249 -0.029315077 0.06632163 0.12515997
## 31      0 6.579332e-02 0.6834249 -0.029315077 0.06632163 0.12515997
## 32      0 7.564633e-02 0.6834249 -0.029315077 0.06632163 0.12515997
## 33      0 8.697490e-02 0.6905678 -0.020224168 0.05595572 0.13070459
## 34      0 1.000000e-01 0.6977106 -0.009315077 0.05370228 0.14480301
## 35      0 1.149757e-01 0.7054029  0.004693544 0.05816608 0.17046182
## 36      0 1.321941e-01 0.7130952  0.018702164 0.06124671 0.19159974
## 37      0 1.519911e-01 0.7137363 -0.001556456 0.07002578 0.15309035
## 38      0 1.747528e-01 0.7149267 -0.039032317 0.06178141 0.14095521
## 39      0 2.009233e-01 0.7072344 -0.071790937 0.05899478 0.06253749
## 40      0 2.310130e-01 0.7072344 -0.071790937 0.05899478 0.06253749
## 41      0 2.656088e-01 0.7072344 -0.071790937 0.05899478 0.06253749
## 42      0 3.053856e-01 0.7143773 -0.060679826 0.05448603 0.06461143
## 43      0 3.511192e-01 0.7227106 -0.048179826 0.06574839 0.06285273
## 44      0 4.037017e-01 0.7310440 -0.036551919 0.06308284 0.05952207
## 45      0 4.641589e-01 0.7310440 -0.036551919 0.06308284 0.05952207
## 46      0 5.336699e-01 0.7310440 -0.036551919 0.06308284 0.05952207
## 47      0 6.135907e-01 0.7387363 -0.025913621 0.06252201 0.05498871
## 48      0 7.054802e-01 0.7387363 -0.025913621 0.06252201 0.05498871
## 49      0 8.111308e-01 0.7387363 -0.025913621 0.06252201 0.05498871
## 50      0 9.326033e-01 0.7387363 -0.025913621 0.06252201 0.05498871
## 51      0 1.072267e+00 0.7387363 -0.025913621 0.06252201 0.05498871
## 52      0 1.232847e+00 0.7470696 -0.011627907 0.05717363 0.03677067
## 53      0 1.417474e+00 0.7470696 -0.011627907 0.05717363 0.03677067
## 54      0 1.629751e+00 0.7470696 -0.011627907 0.05717363 0.03677067
## 55      0 1.873817e+00 0.7470696 -0.011627907 0.05717363 0.03677067
## 56      0 2.154435e+00 0.7470696 -0.011627907 0.05717363 0.03677067
## 57      0 2.477076e+00 0.7470696 -0.011627907 0.05717363 0.03677067
## 58      0 2.848036e+00 0.7470696 -0.011627907 0.05717363 0.03677067
## 59      0 3.274549e+00 0.7470696 -0.011627907 0.05717363 0.03677067
## 60      0 3.764936e+00 0.7470696 -0.011627907 0.05717363 0.03677067
## 61      0 4.328761e+00 0.7470696 -0.011627907 0.05717363 0.03677067
## 62      0 4.977024e+00 0.7470696 -0.011627907 0.05717363 0.03677067
## 63      0 5.722368e+00 0.7554029  0.000000000 0.04974260 0.00000000
## 64      0 6.579332e+00 0.7554029  0.000000000 0.04974260 0.00000000
## 65      0 7.564633e+00 0.7554029  0.000000000 0.04974260 0.00000000
## 66      0 8.697490e+00 0.7554029  0.000000000 0.04974260 0.00000000
## 67      0 1.000000e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 68      0 1.149757e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 69      0 1.321941e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 70      0 1.519911e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 71      0 1.747528e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 72      0 2.009233e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 73      0 2.310130e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 74      0 2.656088e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 75      0 3.053856e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 76      0 3.511192e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 77      0 4.037017e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 78      0 4.641589e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 79      0 5.336699e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 80      0 6.135907e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 81      0 7.054802e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 82      0 8.111308e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 83      0 9.326033e+01 0.7554029  0.000000000 0.04974260 0.00000000
## 84      0 1.072267e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 85      0 1.232847e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 86      0 1.417474e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 87      0 1.629751e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 88      0 1.873817e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 89      0 2.154435e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 90      0 2.477076e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 91      0 2.848036e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 92      0 3.274549e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 93      0 3.764936e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 94      0 4.328761e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 95      0 4.977024e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 96      0 5.722368e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 97      0 6.579332e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 98      0 7.564633e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 99      0 8.697490e+02 0.7554029  0.000000000 0.04974260 0.00000000
## 100     0 1.000000e+03 0.7554029  0.000000000 0.04974260 0.00000000
# Visualize cross-validated accuracy across the lambda grid
plot(model_ridge)

# Obtain cross-validated accuracy metrics from training
confusionMatrix(model_ridge)
## Cross-Validated (10 fold) Confusion Matrix 
## 
## (entries are percentual average cell counts across resamples)
##  
##           Reference
## Prediction    1    2    3
##          1  0.0  0.0  0.0
##          2 19.8 75.4  4.8
##          3  0.0  0.0  0.0
##                            
##  Accuracy (average) : 0.754
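
Every prediction falls in class 2, so the 0.754 average accuracy is simply the majority-class share of the SC sample, and Kappa is 0 at the chosen lambda. A one-line sketch to confirm the no-information rate:

# hedged sketch: majority-class proportion (no-information rate) in df_sc
max(prop.table(table(df_sc$q85)))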
varImp(model_ridge)
## glmnet variable importance
## 
##   variables are sorted by maximum importance across the classes
##   only 20 most important variables shown (out of 59)
## 
##           1     2      3
## q176 100.00 94.06  5.941
## q522  59.37 53.33  6.037
## q178  51.76 45.62  6.137
## q502  51.76 45.62  6.137
## q654  30.52 40.54 10.019
## q472  39.86 33.56  6.293
## q495  37.93 31.94  5.989
## q213  37.93 31.94  5.989
## q532  37.93 31.94  5.989
## q417  37.93 31.94  5.989
## q897  25.15 11.07 36.223
## q872  32.75 35.84  3.094
## q475  17.53 33.00 15.470
## q474  17.53 33.00 15.470
## q493  25.15 31.19  6.037
## q216  24.95 30.94  5.989
## q494  24.95 30.94  5.989
## q896  24.75 30.69  5.941
## q416  24.75 30.69  5.941
## q175  24.75 30.69  5.941
plot(varImp(model_ridge))

var_importance <- varImp(model_ridge)

var_importance$importance %>% 
  janitor::clean_names() %>% 
  filter(x1 > 50) %>% 
  select(x1) %>% 
  arrange(desc(x1))
##             x1
## q176 100.00000
## q522  59.36569
## q178  51.75518
## q502  51.75518
var_importance$importance %>% 
  janitor::clean_names() %>% 
  filter(x2 > 50) %>% 
  select(x2) %>% 
  arrange(desc(x2))
##            x2
## q176 94.05941
## q522 53.32850
var_importance$importance %>% 
  janitor::clean_names() %>% 
  filter(x3 > 50) %>% 
  select(x3) %>% 
  arrange(desc(x3))
## [1] x3
## <0 rows> (or 0-length row.names)
var_import <- varImp(model_ridge)$importance

West Virginia (WV)

df_wv <- df_fit_state %>% 
  filter(sitename == "West Virginia (WV)") %>% 
  select(-sitename, -year)

set.seed(123)
# Create a grid of candidate lambda values
lambda <- 10^seq(-3, 3, length = 100)

# Specify 10-fold cross-validation as the training control
train_control_ridge <- trainControl(method = "cv", number = 10)

# Fit a ridge-penalized multinomial model (alpha = 0) over the lambda grid
model_ridge <- train(q85 ~ .,
                     data = df_wv, 
                     method = "glmnet", 
                     trControl = train_control_ridge, 
                     tuneGrid = expand.grid(alpha = 0, lambda = lambda))
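
Beyond varImp, the ridge coefficients themselves at the tuned lambda can be read off the underlying glmnet fit. A minimal sketch (for this multinomial outcome, coef() returns one sparse coefficient matrix per class of q85):

# hedged sketch: coefficients of the final glmnet fit at the tuned lambda
coef(model_ridge$finalModel, s = model_ridge$bestTune$lambda)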

summary(model_ridge)
##             Length Class      Mode     
## a0          300    -none-     numeric  
## beta          3    -none-     list     
## dfmat       300    -none-     numeric  
## df          100    -none-     numeric  
## dim           2    -none-     numeric  
## lambda      100    -none-     numeric  
## dev.ratio   100    -none-     numeric  
## nulldev       1    -none-     numeric  
## npasses       1    -none-     numeric  
## jerr          1    -none-     numeric  
## offset        1    -none-     logical  
## classnames    3    -none-     character
## grouped       1    -none-     logical  
## call          5    -none-     call     
## nobs          1    -none-     numeric  
## lambdaOpt     1    -none-     numeric  
## xNames       59    -none-     character
## problemType   1    -none-     character
## tuneValue     2    data.frame list     
## obsLevels     3    -none-     character
## param         0    -none-     list
model_ridge$bestTune
##    alpha    lambda
## 42     0 0.3053856
model_ridge$results
##     alpha       lambda  Accuracy      Kappa AccuracySD    KappaSD
## 1       0 1.000000e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 2       0 1.149757e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 3       0 1.321941e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 4       0 1.519911e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 5       0 1.747528e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 6       0 2.009233e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 7       0 2.310130e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 8       0 2.656088e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 9       0 3.053856e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 10      0 3.511192e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 11      0 4.037017e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 12      0 4.641589e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 13      0 5.336699e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 14      0 6.135907e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 15      0 7.054802e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 16      0 8.111308e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 17      0 9.326033e-03 0.6581281 0.16024555 0.08105490 0.18670848
## 18      0 1.072267e-02 0.6616995 0.16148486 0.08451712 0.19167171
## 19      0 1.232847e-02 0.6652709 0.16734105 0.08928321 0.19751430
## 20      0 1.417474e-02 0.6793103 0.18415400 0.07982728 0.18939794
## 21      0 1.629751e-02 0.6757389 0.16813953 0.08879539 0.20056449
## 22      0 1.873817e-02 0.6722906 0.14691702 0.08725504 0.19625151
## 23      0 2.154435e-02 0.6864532 0.16527680 0.08458319 0.20223221
## 24      0 2.477076e-02 0.6830049 0.15324694 0.08818819 0.21037388
## 25      0 2.848036e-02 0.6864532 0.15804904 0.08458319 0.20632283
## 26      0 3.274549e-02 0.6864532 0.15804904 0.08458319 0.20632283
## 27      0 3.764936e-02 0.6864532 0.15895452 0.08624247 0.20907978
## 28      0 4.328761e-02 0.6864532 0.15895452 0.08624247 0.20907978
## 29      0 4.977024e-02 0.6864532 0.15895452 0.08624247 0.20907978
## 30      0 5.722368e-02 0.6864532 0.15550624 0.08624247 0.21162956
## 31      0 6.579332e-02 0.6900246 0.15716149 0.08156237 0.20322173
## 32      0 7.564633e-02 0.6865764 0.13786882 0.08545542 0.22069193
## 33      0 8.697490e-02 0.6865764 0.12461467 0.07671633 0.19808966
## 34      0 1.000000e-01 0.6865764 0.12461467 0.07671633 0.19808966
## 35      0 1.149757e-01 0.6900246 0.13121630 0.07932294 0.20290998
## 36      0 1.321941e-01 0.6866995 0.10039810 0.06438635 0.16565325
## 37      0 1.519911e-01 0.6938424 0.11082005 0.05824948 0.15897291
## 38      0 1.747528e-01 0.7009852 0.12394185 0.06050463 0.16314300
## 39      0 2.009233e-01 0.7080049 0.14217612 0.06028602 0.17125450
## 40      0 2.310130e-01 0.7221675 0.16652098 0.05643626 0.17468217
## 41      0 2.656088e-01 0.7221675 0.15402098 0.05643626 0.18229022
## 42      0 3.053856e-01 0.7256158 0.15930902 0.05026280 0.17594085
## 43      0 3.511192e-01 0.7149015 0.11412360 0.04427126 0.15836639
## 44      0 4.037017e-01 0.7041872 0.05930698 0.03770789 0.12890644
## 45      0 4.641589e-01 0.7007389 0.04536715 0.03352127 0.10644567
## 46      0 5.336699e-01 0.7078818 0.05647826 0.02674176 0.09328230
## 47      0 6.135907e-01 0.7044335 0.03640527 0.02663568 0.08895164
## 48      0 7.054802e-01 0.7044335 0.03640527 0.02663568 0.08895164
## 49      0 8.111308e-01 0.7045567 0.02695916 0.02504071 0.07471474
## 50      0 9.326033e-01 0.7080049 0.03279857 0.01917082 0.06939520
## 51      0 1.072267e+00 0.7080049 0.03279857 0.01917082 0.06939520
## 52      0 1.232847e+00 0.7080049 0.03279857 0.01917082 0.06939520
## 53      0 1.417474e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 54      0 1.629751e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 55      0 1.873817e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 56      0 2.154435e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 57      0 2.477076e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 58      0 2.848036e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 59      0 3.274549e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 60      0 3.764936e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 61      0 4.328761e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 62      0 4.977024e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 63      0 5.722368e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 64      0 6.579332e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 65      0 7.564633e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 66      0 8.697490e+00 0.7008621 0.00000000 0.01453051 0.00000000
## 67      0 1.000000e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 68      0 1.149757e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 69      0 1.321941e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 70      0 1.519911e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 71      0 1.747528e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 72      0 2.009233e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 73      0 2.310130e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 74      0 2.656088e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 75      0 3.053856e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 76      0 3.511192e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 77      0 4.037017e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 78      0 4.641589e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 79      0 5.336699e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 80      0 6.135907e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 81      0 7.054802e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 82      0 8.111308e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 83      0 9.326033e+01 0.7008621 0.00000000 0.01453051 0.00000000
## 84      0 1.072267e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 85      0 1.232847e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 86      0 1.417474e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 87      0 1.629751e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 88      0 1.873817e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 89      0 2.154435e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 90      0 2.477076e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 91      0 2.848036e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 92      0 3.274549e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 93      0 3.764936e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 94      0 4.328761e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 95      0 4.977024e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 96      0 5.722368e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 97      0 6.579332e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 98      0 7.564633e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 99      0 8.697490e+02 0.7008621 0.00000000 0.01453051 0.00000000
## 100     0 1.000000e+03 0.7008621 0.00000000 0.01453051 0.00000000
# Visualize cross-validated accuracy across the lambda grid
plot(model_ridge)

# Obtain cross-validated accuracy metrics from training
confusionMatrix(model_ridge)
## Cross-Validated (10 fold) Confusion Matrix 
## 
## (entries are percentual average cell counts across resamples)
##  
##           Reference
## Prediction    1    2    3
##          1  3.9  1.4  1.1
##          2 19.7 68.7  5.3
##          3  0.0  0.0  0.0
##                             
##  Accuracy (average) : 0.7254
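
The averaged table shows that class 3 is never predicted for West Virginia either. As a quick sketch, class-wise recall can be computed from the cross-validated table stored in the object returned by confusionMatrix() (cm_wv is an illustrative name):

# hedged sketch: per-class recall from the averaged CV confusion matrix
cm_wv <- confusionMatrix(model_ridge)$table
diag(cm_wv) / colSums(cm_wv)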
varImp(model_ridge)
## glmnet variable importance
## 
##   variables are sorted by maximum importance across the classes
##   only 20 most important variables shown (out of 59)
## 
##                 1      2      3
## q897      100.000 84.340 15.660
## q896       93.989 80.940 13.049
## q176       57.837 69.579 11.741
## q495       56.820  8.947 65.767
## q174       49.985 31.169 18.817
## q417       41.711 48.931  7.221
## q493        2.538 44.812 47.350
## q215       35.001 47.179 12.178
## q178       30.266 47.095 16.829
## q216       47.033 38.168  8.865
## race43     31.967 43.752 11.786
## q893        6.779 41.562 34.783
## q416        8.927 32.616 41.543
## q496       39.550 27.804 11.746
## q895       36.884 23.440 13.444
## q175       35.431  8.698 26.733
## q214       33.587 16.081 17.506
## q894       13.139 27.887 14.749
## qnothhpl2  27.563 18.591  8.972
## race42     16.064 27.319 11.255
plot(varImp(model_ridge))

var_importance <- varImp(model_ridge)

var_importance$importance %>% 
  janitor::clean_names() %>% 
  filter(x1 > 50) %>% 
  select(x1) %>% 
  arrange(desc(x1))
##             x1
## q897 100.00000
## q896  93.98877
## q176  57.83746
## q495  56.82006
var_importance$importance %>% 
  janitor::clean_names() %>% 
  filter(x2 > 50) %>% 
  select(x2) %>% 
  arrange(desc(x2))
##            x2
## q897 84.34023
## q896 80.94003
## q176 69.57892
var_importance$importance %>% 
  janitor::clean_names() %>% 
  filter(x3 > 50) %>% 
  select(x3) %>% 
  arrange(desc(x3))
##            x3
## q495 65.76748
var_import <- varImp(model_ridge)$importance
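
One last caveat: model_ridge and var_import are reused for every site, so only the West Virginia objects remain in the workspace at this point. If the per-state importance tables were needed side by side, a sketch along these lines would keep each result (the sites vector is illustrative; lambda and train_control_ridge are as defined above):

# hedged sketch: fit each state once and keep the importance tables together
sites <- c("South Carolina (SC)", "West Virginia (WV)")  # extend with the other sites as needed

importance_by_state <- 
  map(set_names(sites), function(s) {
    df_s <- df_fit_state %>% 
      filter(sitename == s) %>% 
      select(-sitename, -year)
    fit <- train(q85 ~ ., data = df_s, method = "glmnet",
                 trControl = train_control_ridge,
                 tuneGrid = expand.grid(alpha = 0, lambda = lambda))
    varImp(fit)$importance
  })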